semantic_logger 4.12.0 → 4.16.0

Files changed (34)
  1. checksums.yaml +4 -4
  2. data/README.md +8 -5
  3. data/lib/semantic_logger/appender/async_batch.rb +0 -2
  4. data/lib/semantic_logger/appender/elasticsearch.rb +18 -17
  5. data/lib/semantic_logger/appender/elasticsearch_http.rb +1 -0
  6. data/lib/semantic_logger/appender/honeybadger_insights.rb +61 -0
  7. data/lib/semantic_logger/appender/http.rb +12 -6
  8. data/lib/semantic_logger/appender/new_relic_logs.rb +57 -0
  9. data/lib/semantic_logger/appender/rabbitmq.rb +8 -2
  10. data/lib/semantic_logger/appender/splunk.rb +2 -2
  11. data/lib/semantic_logger/appender/syslog.rb +1 -1
  12. data/lib/semantic_logger/appender/tcp.rb +1 -1
  13. data/lib/semantic_logger/appender/wrapper.rb +9 -3
  14. data/lib/semantic_logger/appender.rb +24 -22
  15. data/lib/semantic_logger/appenders.rb +7 -3
  16. data/lib/semantic_logger/base.rb +4 -3
  17. data/lib/semantic_logger/debug_as_trace_logger.rb +4 -15
  18. data/lib/semantic_logger/formatters/base.rb +0 -1
  19. data/lib/semantic_logger/formatters/logfmt.rb +1 -1
  20. data/lib/semantic_logger/formatters/new_relic_logs.rb +109 -0
  21. data/lib/semantic_logger/formatters/raw.rb +1 -1
  22. data/lib/semantic_logger/formatters/signalfx.rb +8 -2
  23. data/lib/semantic_logger/formatters.rb +12 -11
  24. data/lib/semantic_logger/levels.rb +18 -22
  25. data/lib/semantic_logger/log.rb +11 -5
  26. data/lib/semantic_logger/logger.rb +0 -1
  27. data/lib/semantic_logger/metric/statsd.rb +1 -0
  28. data/lib/semantic_logger/semantic_logger.rb +23 -1
  29. data/lib/semantic_logger/sync_processor.rb +7 -7
  30. data/lib/semantic_logger/test/capture_log_events.rb +32 -11
  31. data/lib/semantic_logger/test/minitest.rb +88 -0
  32. data/lib/semantic_logger/version.rb +1 -1
  33. data/lib/semantic_logger.rb +16 -43
  34. metadata +12 -4
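
Two new appenders ship in this range: HoneybadgerInsights (file 6) and NewRelicLogs (file 8). As a quick orientation before the file-by-file diff, a minimal sketch of enabling them, taken from the example comments in the new files (no extra options are assumed):

~~~ruby
require "semantic_logger"

# Forward log events to the Honeybadger Insights events API.
SemanticLogger.add_appender(appender: :honeybadger_insights)

# Forward log events to New Relic Logs.
SemanticLogger.add_appender(appender: :new_relic_logs)
~~~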
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b09c37737f1e42edbeee95b036ba0581d5164d756e13de37aca9b22b3093cd45
- data.tar.gz: 9f66b72480df2371a7d305164e443f9667e1acd5521c1347d54c3841efbfa4c9
+ metadata.gz: 3d393f5d7fde6627e73b79c370fed2f83e212eb43199bfbebab15b5eeb8ce66b
+ data.tar.gz: c75577f1ad4f5c209cb7fb0028c8d5738e9c546b2df6875bcec40170b1ca1f5e
  SHA512:
- metadata.gz: fb27aa1a6d5dc1f2e0e6505ba9348a7cec091a153763361e8c9c20c21177bf84ac11ac2f21fc37ec994988ba31763dcd5c3134ac40a169ac987ed617eceec46e
- data.tar.gz: 5d084bc8cc57b1e836326eae912667ef6b7a284b0125c0b0acee678af78c01017b9667db9a43df7c0fb38af4998b0ab3128ac05d3e2bce266176e9c4d8fadccd
+ metadata.gz: f86046a5e749f8d69ff17b9c4c9c91ff30507436bfe91f181e0c0a3a8fe1529d0f176940be3c099a9267d5f60e3215610cd3e2f49a2e70d55f88cce7c2345ae9
+ data.tar.gz: 4277d124c94f200d0d6aff5f74f58217cfd48276323b1d47656037dfbcd7d40b21e2ece17b7f50b521098608ce5d08dbd786fcbd53866b8fd052fe17f18db40d
data/README.md CHANGED
@@ -1,7 +1,7 @@
  # Semantic Logger
  [![Gem Version](https://img.shields.io/gem/v/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![Build Status](https://github.com/reidmorrison/semantic_logger/workflows/build/badge.svg)](https://github.com/reidmorrison/semantic_logger/actions?query=workflow%3Abuild) [![Downloads](https://img.shields.io/gem/dt/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](http://opensource.org/licenses/Apache-2.0) ![](https://img.shields.io/badge/status-Production%20Ready-blue.svg)
 
- Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.
+ Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.
 
  * https://logger.rocketjob.io/
 
@@ -21,7 +21,7 @@ Logging to the following destinations are all supported "out-of-the-box":
  * NewRelic
  * Splunk
  * MongoDB
- * Honeybadger
+ * Honeybadger (exceptions and events)
  * Sentry (both with legacy `sentry-raven` and modern `sentry-ruby` gem)
  * HTTP
  * TCP
@@ -54,7 +54,10 @@ The following gems are only required when their corresponding appenders are bein
  and are therefore not automatically included by this gem:
  - Bugsnag Appender: gem 'bugsnag'
  - MongoDB Appender: gem 'mongo' 1.9.2 or above
+ - Honeybadger Appender: gem 'honeybadger'
+ - HoneybadgerInsights Appender: gem 'honeybadger'
  - NewRelic Appender: gem 'newrelic_rpm'
+ - NewRelicLogs Appender: gem 'newrelic_rpm'
  - Syslog Appender: gem 'syslog_protocol' 0.9.2 or above
  - Syslog Appender to a remote syslogng server over TCP or UDP: gem 'net_tcp_client'
  - Splunk Appender: gem 'splunk-sdk-ruby'
@@ -128,16 +131,16 @@ logger.debug payload: {foo: 'foo', bar: 'bar'}
  Similarly, for measure blocks:
 
  ~~~ruby
- logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
+ logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
  ~~~
 
  Must be replaced with the following in v4:
 
  ~~~ruby
- logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
+ logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
  ~~~
 
- The common log call has not changed, and the payload is still logged directly:
+ The common log call has not changed, and the payload is still logged directly:
 
  ~~~ruby
  logger.debug('log this', foo: 'foo', bar: 'bar')
data/lib/semantic_logger/appender/async_batch.rb CHANGED
@@ -1,5 +1,3 @@
- require "concurrent"
-
  module SemanticLogger
  module Appender
  # Log asynchronously in batches using a separate thread.
data/lib/semantic_logger/appender/elasticsearch.rb CHANGED
@@ -37,6 +37,7 @@ module SemanticLogger
  #
  # type: [String]
  # Document type to associate with logs when they are written.
+ # Deprecated in Elasticsearch 7.0.0.
  # Default: 'log'
  #
  # level: [:trace | :debug | :info | :warn | :error | :fatal]
@@ -146,7 +147,7 @@ module SemanticLogger
  @elasticsearch_args = elasticsearch_args.dup
  @elasticsearch_args[:url] = url if url && !elasticsearch_args[:hosts]
  @elasticsearch_args[:logger] = logger
- @data_stream = data_stream
+ @data_stream = data_stream
 
  super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: false, &block)
  reopen
@@ -176,11 +177,12 @@ module SemanticLogger
  private
 
  def write_to_elasticsearch(messages)
- bulk_result = if @data_stream
- @client.bulk(index: index, body: messages)
- else
- @client.bulk(body: messages)
- end
+ bulk_result =
+ if @data_stream
+ @client.bulk(index: index, body: messages)
+ else
+ @client.bulk(body: messages)
+ end
 
  return unless bulk_result["errors"]
 
@@ -190,22 +192,21 @@ module SemanticLogger
 
  def bulk_index(log)
  expanded_index_name = log.time.strftime("#{index}-#{date_pattern}")
- if @data_stream
- {"create" => {}}
- else
- {"index" => {"_index" => expanded_index_name, "_type" => type}}
- end
+ return {"create" => {}} if @data_stream
+
+ bulk_index = {"index" => {"_index" => expanded_index_name}}
+ bulk_index["index"].merge!({"_type" => type}) if version_supports_type?
+ bulk_index
  end
 
  def default_formatter
- time_key = if @data_stream
- "@timestamp"
- else
- :timestamp
- end
-
+ time_key = @data_stream ? "@timestamp" : :timestamp
  SemanticLogger::Formatters::Raw.new(time_format: :iso_8601, time_key: time_key)
  end
+
+ def version_supports_type?
+ Gem::Version.new(::Elasticsearch::VERSION) < Gem::Version.new(7)
+ end
  end
  end
  end
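
The `version_supports_type?` change above only emits `_type` on bulk requests when the installed `elasticsearch` gem is older than 7.0. A minimal sketch of configuring the appender, assuming a local cluster URL; `url:` and `data_stream:` are the initializer keywords visible in this diff:

~~~ruby
SemanticLogger.add_appender(
  appender:    :elasticsearch,
  url:         "http://localhost:9200",   # placeholder cluster URL
  # When true, bulk requests use {"create" => {}} and the "@timestamp" time key.
  data_stream: true
)
~~~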
data/lib/semantic_logger/appender/elasticsearch_http.rb CHANGED
@@ -26,6 +26,7 @@ module SemanticLogger
  #
  # type: [String]
  # Document type to associate with logs when they are written.
+ # Deprecated in Elasticsearch 7.0.0
  # Default: 'log'
  #
  # level: [:trace | :debug | :info | :warn | :error | :fatal]
data/lib/semantic_logger/appender/honeybadger_insights.rb CHANGED
@@ -0,0 +1,61 @@
+ begin
+ require "honeybadger"
+ rescue LoadError
+ raise LoadError, 'Gem honeybadger is required for logging purposes. Please add the gem "honeybadger" to your Gemfile.'
+ end
+
+ # Send log messages to honeybadger events/insights API
+ #
+ # Example:
+ # SemanticLogger.add_appender(appender: :honeybadger_insights)
+ #
+ module SemanticLogger
+ module Appender
+ class HoneybadgerInsights < SemanticLogger::Subscriber
+ # Honeybadger Appender
+ #
+ # Parameters
+ # level: [:trace | :debug | :info | :warn | :error | :fatal]
+ # Override the log level for this appender.
+ # Default: :error
+ #
+ # formatter: [Object|Proc|Symbol|Hash]
+ # An instance of a class that implements #call, or a Proc to be used to format
+ # the output from this appender
+ # Default: Use the built-in formatter (See: #call)
+ #
+ # filter: [Regexp|Proc]
+ # RegExp: Only include log messages where the class name matches the supplied.
+ # regular expression. All other messages will be ignored.
+ # Proc: Only include log messages where the supplied Proc returns true
+ # The Proc must return true or false.
+ #
+ # host: [String]
+ # Name of this host to appear in log messages.
+ # Default: SemanticLogger.host
+ #
+ # application: [String]
+ # Name of this application to appear in log messages.
+ # Default: SemanticLogger.application
+ def initialize(level: :info, **args, &block)
+ super(level: level, **args, &block)
+ end
+
+ # Send log to honeybadger events API
+ def log(log)
+ event = formatter.call(log, self)
+
+ ::Honeybadger.event(event)
+
+ true
+ end
+
+ private
+
+ # Use Raw Formatter by default
+ def default_formatter
+ SemanticLogger::Formatters::Raw.new(time_key: :ts, time_format: :rfc_3339)
+ end
+ end
+ end
+ end
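
A hedged usage sketch for the new appender: each log event is rendered with the Raw formatter (`ts` key, RFC 3339 time) and passed to `Honeybadger.event`. The logger name and payload below are illustrative only:

~~~ruby
SemanticLogger.add_appender(appender: :honeybadger_insights, level: :info)

logger = SemanticLogger["Checkout"]
# Arrives in Honeybadger Insights as an event containing the message and payload.
logger.info("Order placed", order_id: 123, total_cents: 4_500)
~~~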
data/lib/semantic_logger/appender/http.rb CHANGED
@@ -48,6 +48,11 @@ module SemanticLogger
  # password: [String]
  # Password for basic Authentication.
  #
+ # header: [Hash]
+ # Custom HTTP headers to send with each request.
+ # Default: {} ( do not send any custom headers)
+ # Example: {"Authorization" => "Bearer BEARER_TOKEN"}
+ #
  # compress: [true|false]
  # Whether to compress the JSON string with GZip.
  # Default: false
@@ -95,6 +100,7 @@ module SemanticLogger
  ssl: {},
  username: nil,
  password: nil,
+ header: {},
  proxy_url: :ENV,
  open_timeout: 2.0,
  read_timeout: 1.0,
@@ -118,7 +124,7 @@ module SemanticLogger
  "Content-Type" => "application/json",
  "Connection" => "keep-alive",
  "Keep-Alive" => "300"
- }
+ }.merge(header)
  @header["Content-Encoding"] = "gzip" if @compress
 
  uri = URI.parse(@url)
@@ -160,10 +166,10 @@ module SemanticLogger
  end
 
  @http = if @proxy_uri
- Net::HTTP.new(server, port, @proxy_uri.host, @proxy_uri.port, @proxy_uri.user, @proxy_uri.password)
- else
- Net::HTTP.new(server, port, @proxy_url)
- end
+ Net::HTTP.new(server, port, @proxy_uri.host, @proxy_uri.port, @proxy_uri.user, @proxy_uri.password)
+ else
+ Net::HTTP.new(server, port, @proxy_url)
+ end
 
  if @ssl_options
  @http.methods.grep(/\A(\w+)=\z/) do |meth|
@@ -226,7 +232,7 @@ module SemanticLogger
  end
  request.basic_auth(@username, @password) if @username
  response = @http.request(request)
- if response.code == "200" || response.code == "201"
+ if response.is_a?(Net::HTTPSuccess)
  true
  else
  # Failures are logged to the global semantic logger failsafe logger (Usually stderr or file)
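
The new `header:` option is merged over the default headers built in `reopen`, so a bearer token (or any other custom header) can be attached to every request. A minimal sketch, with a placeholder URL and token:

~~~ruby
SemanticLogger.add_appender(
  appender: :http,
  url:      "https://logs.example.com/ingest",          # placeholder endpoint
  header:   {"Authorization" => "Bearer BEARER_TOKEN"}   # example from the docs above
)
~~~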
data/lib/semantic_logger/appender/new_relic_logs.rb CHANGED
@@ -0,0 +1,57 @@
+ begin
+ require "newrelic_rpm"
+ rescue LoadError
+ raise LoadError, 'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
+ end
+
+ require "semantic_logger/formatters/new_relic_logs"
+
+ # Send log messages to NewRelic
+ #
+ # All log entries will appear under
+ # "Logs" in New Relic
+ #
+ # == Caveats
+ #
+ # * The NewRelic agent only sends logs to NewRelic when log forwarding is enabled. There is however an open
+ # issue to get this fixed: https://github.com/newrelic/newrelic-ruby-agent/issues/1614. Please see the guide
+ # for a workaround.
+ #
+ # Example:
+ # SemanticLogger.add_appender(appender: :new_relic_logs)
+ module SemanticLogger
+ module Appender
+ class NewRelicLogs < SemanticLogger::Subscriber
+ # Create Appender
+ #
+ # Parameters
+ # level: [:trace | :debug | :info | :warn | :error | :fatal]
+ # Override the log level for this appender.
+ # Default: SemanticLogger.default_level
+ #
+ # formatter: [Object|Proc]
+ # An instance of a class that implements #call, or a Proc to be used to format
+ # the output from this appender
+ # Default: SemanticLogger::Formatters::NewRelicLogs
+ #
+ # filter: [Regexp|Proc]
+ # RegExp: Only include log messages where the class name matches the supplied.
+ # regular expression. All other messages will be ignored.
+ # Proc: Only include log messages where the supplied Proc returns true
+ # The Proc must return true or false.
+ def initialize(formatter: SemanticLogger::Formatters::NewRelicLogs.new, **args, &block)
+ super(formatter: formatter, **args, &block)
+ end
+
+ # Send an error notification to New Relic
+ def log(log)
+ self.class.log_newrelic(formatter.call(log, self).to_json, log.level.to_s.upcase)
+ true
+ end
+
+ def self.log_newrelic(message, level)
+ ::NewRelic::Agent.agent.log_event_aggregator.record(message, level)
+ end
+ end
+ end
+ end
data/lib/semantic_logger/appender/rabbitmq.rb CHANGED
@@ -60,6 +60,10 @@ module SemanticLogger
  # Name of this application to appear in log messages.
  # Default: SemanticLogger.application
  #
+ # metrics: [Boolean]
+ # Also send metrics only events to rabbitmq.
+ # Default: true
+ #
  # RabbitMQ Parameters:
  #
  # rabbitmq_host: [String]
@@ -76,13 +80,15 @@ module SemanticLogger
  # Default: nil
  #
  # more parameters supported by Bunny: http://rubybunny.info/articles/connecting.html
- def initialize(queue_name: "semantic_logger", rabbitmq_host: nil, metrics: false, **args, &block)
+ def initialize(queue_name: "semantic_logger", rabbitmq_host: nil,
+ level: nil, formatter: nil, filter: nil, application: nil, environment: nil, host: nil, metrics: true,
+ **args, &block)
  @queue_name = queue_name
  @rabbitmq_args = args.dup
  @rabbitmq_args[:host] = rabbitmq_host
  @rabbitmq_args[:logger] = logger
 
- super(level: level, formatter: formatter, filter: filter, application: application, host: host, metrics: metrics, &block)
+ super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
  reopen
  end
 
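Note that `metrics:` now defaults to `true` for this appender, so metric-only events are published as well. A sketch that restores the previous behaviour; the host value is a placeholder:

~~~ruby
SemanticLogger.add_appender(
  appender:      :rabbitmq,
  queue_name:    "semantic_logger",
  rabbitmq_host: "127.0.0.1",   # placeholder
  metrics:       false          # opt back out of metric-only events
)
~~~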
data/lib/semantic_logger/appender/splunk.rb CHANGED
@@ -101,10 +101,10 @@ module SemanticLogger
  # open the handles to resources
  def reopen
  # Connect to splunk. Connect is a synonym for creating a Service by hand and calling login.
- self.service = Splunk.connect(config)
+ @service = ::Splunk.connect(config)
 
  # The index we are logging to
- self.service_index = service.indexes[index]
+ @service_index = service.indexes[index]
  end
 
  # Log the message to Splunk
data/lib/semantic_logger/appender/syslog.rb CHANGED
@@ -208,7 +208,7 @@ module SemanticLogger
 
  # Flush is called by the semantic_logger during shutdown.
  def flush
- @remote_syslog.flush if @remote_syslog&.respond_to?(:flush)
+ @remote_syslog.flush if @remote_syslog.respond_to?(:flush)
  end
 
  # Returns [SemanticLogger::Formatters::Base] default formatter for this Appender depending on the protocal selected
data/lib/semantic_logger/appender/tcp.rb CHANGED
@@ -191,7 +191,7 @@ module SemanticLogger
  Net::TCPClient.logger = logger
  Net::TCPClient.logger.name = "Net::TCPClient"
 
- super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, &block)
+ super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
  reopen
  end
 
data/lib/semantic_logger/appender/wrapper.rb CHANGED
@@ -1,7 +1,6 @@
  # Send log messages to any standard Ruby logging class.
  #
  # Forwards logging call to loggers such as Logger, log4r, etc.
- #
  module SemanticLogger
  module Appender
  class Wrapper < SemanticLogger::Subscriber
@@ -56,15 +55,22 @@ module SemanticLogger
  # trace entries are mapped to debug since :trace is not supported by the
  # Ruby or Rails Loggers
  def log(log)
- @logger.send(log.level == :trace ? :debug : log.level, formatter.call(log, self))
+ level = log.level == :trace ? :debug : log.level
+ @logger.send(level, formatter.call(log, self))
  true
  end
 
  # Flush all pending logs to disk.
- # Waits for all sent documents to be writted to disk
+ # Waits for all queued log messages to be written to disk.
  def flush
  @logger.flush if @logger.respond_to?(:flush)
  end
+
+ # Close underlying log
+ # Waits for all queued log messages to be written to disk.
+ def close
+ @logger.close if @logger.respond_to?(:close)
+ end
  end
  end
  end
data/lib/semantic_logger/appender.rb CHANGED
@@ -1,28 +1,30 @@
  module SemanticLogger
  module Appender
  # @formatter:off
- autoload :Async, "semantic_logger/appender/async"
- autoload :AsyncBatch, "semantic_logger/appender/async_batch"
- autoload :Bugsnag, "semantic_logger/appender/bugsnag"
- autoload :Elasticsearch, "semantic_logger/appender/elasticsearch"
- autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
- autoload :File, "semantic_logger/appender/file"
- autoload :Graylog, "semantic_logger/appender/graylog"
- autoload :Honeybadger, "semantic_logger/appender/honeybadger"
- autoload :IO, "semantic_logger/appender/io"
- autoload :Kafka, "semantic_logger/appender/kafka"
- autoload :Sentry, "semantic_logger/appender/sentry"
- autoload :Http, "semantic_logger/appender/http"
- autoload :MongoDB, "semantic_logger/appender/mongodb"
- autoload :NewRelic, "semantic_logger/appender/new_relic"
- autoload :Rabbitmq, "semantic_logger/appender/rabbitmq"
- autoload :Splunk, "semantic_logger/appender/splunk"
- autoload :SplunkHttp, "semantic_logger/appender/splunk_http"
- autoload :Syslog, "semantic_logger/appender/syslog"
- autoload :Tcp, "semantic_logger/appender/tcp"
- autoload :Udp, "semantic_logger/appender/udp"
- autoload :Wrapper, "semantic_logger/appender/wrapper"
- autoload :SentryRuby, "semantic_logger/appender/sentry_ruby"
+ autoload :Async, "semantic_logger/appender/async"
+ autoload :AsyncBatch, "semantic_logger/appender/async_batch"
+ autoload :Bugsnag, "semantic_logger/appender/bugsnag"
+ autoload :Elasticsearch, "semantic_logger/appender/elasticsearch"
+ autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
+ autoload :File, "semantic_logger/appender/file"
+ autoload :Graylog, "semantic_logger/appender/graylog"
+ autoload :Honeybadger, "semantic_logger/appender/honeybadger"
+ autoload :HoneybadgerInsights, "semantic_logger/appender/honeybadger_insights"
+ autoload :IO, "semantic_logger/appender/io"
+ autoload :Kafka, "semantic_logger/appender/kafka"
+ autoload :Sentry, "semantic_logger/appender/sentry"
+ autoload :Http, "semantic_logger/appender/http"
+ autoload :MongoDB, "semantic_logger/appender/mongodb"
+ autoload :NewRelic, "semantic_logger/appender/new_relic"
+ autoload :NewRelicLogs, "semantic_logger/appender/new_relic_logs"
+ autoload :Rabbitmq, "semantic_logger/appender/rabbitmq"
+ autoload :Splunk, "semantic_logger/appender/splunk"
+ autoload :SplunkHttp, "semantic_logger/appender/splunk_http"
+ autoload :Syslog, "semantic_logger/appender/syslog"
+ autoload :Tcp, "semantic_logger/appender/tcp"
+ autoload :Udp, "semantic_logger/appender/udp"
+ autoload :Wrapper, "semantic_logger/appender/wrapper"
+ autoload :SentryRuby, "semantic_logger/appender/sentry_ruby"
  # @formatter:on
 
  # Returns [SemanticLogger::Subscriber] appender for the supplied options
data/lib/semantic_logger/appenders.rb CHANGED
@@ -6,6 +6,7 @@ module SemanticLogger
  def initialize(logger = Processor.logger.dup)
  @logger = logger
  @logger.name = self.class.name
+ super()
  end
 
  def add(**args, &block)
@@ -45,18 +46,21 @@ module SemanticLogger
  end
 
  def close
- to_a.each do |appender|
+ closed_appenders = []
+ each do |appender|
  logger.trace "Closing appender: #{appender.name}"
- delete(appender)
+ appenders << appender
  appender.flush
  appender.close
  rescue Exception => e
  logger.error "Failed to close appender: #{appender.name}", e
  end
+ # Delete appenders outside the #each above which prevents changes during iteration.
+ closed_appenders.each { |appender| delete(appender) }
  logger.trace "All appenders closed and removed from appender list"
  end
 
- # After a fork the appender thread is not running, start it if it is not running.
+ # After a fork reopen each appender.
  def reopen
  each do |appender|
  next unless appender.respond_to?(:reopen)
data/lib/semantic_logger/base.rb CHANGED
@@ -77,7 +77,7 @@ module SemanticLogger
  # # Log an exception in a semantic way
  # logger.info("Parsing received XML", exc)
  #
- SemanticLogger::LEVELS.each_with_index do |level, index|
+ SemanticLogger::Levels::LEVELS.each_with_index do |level, index|
  class_eval <<~METHODS, __FILE__, __LINE__ + 1
  def #{level}(message=nil, payload=nil, exception=nil, &block)
  if level_index <= #{index}
@@ -342,9 +342,10 @@ module SemanticLogger
  # Add result of block to message or payload if not nil
  if block_given?
  result = yield(log)
- if result.is_a?(String)
+ case result
+ when String
  log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
- elsif result.is_a?(Hash)
+ when Hash
  log.assign_hash(result)
  end
  end
data/lib/semantic_logger/debug_as_trace_logger.rb CHANGED
@@ -3,20 +3,9 @@ module SemanticLogger
  # This is useful for existing gems / libraries that log too much to debug
  # when most of the debug logging should be at the trace level
  class DebugAsTraceLogger < Logger
- def debug(*args, &block)
- trace(*args, &block)
- end
-
- def debug?
- trace?
- end
-
- def measure_debug(*args, &block)
- measure_trace(*args, &block)
- end
-
- def benchmark_debug(*args, &block)
- measure_trace(*args, &block)
- end
+ alias debug trace
+ alias debug? trace?
+ alias measure_debug measure_trace
+ alias benchmark_debug benchmark_trace
  end
  end
data/lib/semantic_logger/formatters/base.rb CHANGED
@@ -23,7 +23,6 @@ module SemanticLogger
  # See Time#strftime for the format of this string.
  # :iso_8601 Outputs an ISO8601 Formatted timestamp.
  # :ms Output in miliseconds since epoch.
- # nil: Returns Empty string for time ( no time is output ).
  # Default: '%Y-%m-%d %H:%M:%S.%<precision>N'
  # log_host: [Boolean]
  # Whether or not to include hostname in logs
data/lib/semantic_logger/formatters/logfmt.rb CHANGED
@@ -31,7 +31,7 @@ module SemanticLogger
  private
 
  def raw_to_logfmt
- @parsed = @raw.slice(time_key, :level, :name, :message, :duration).merge(tag: "success")
+ @parsed = @raw.slice(time_key, :level, :name, :message, :duration, :duration_ms).merge(tag: "success")
  handle_tags
  handle_payload
  handle_exception
data/lib/semantic_logger/formatters/new_relic_logs.rb CHANGED
@@ -0,0 +1,109 @@
+ require "json"
+
+ begin
+ require "newrelic_rpm"
+ rescue LoadError
+ raise LoadError, 'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
+ end
+
+ raise "NewRelic::Agent.linking_metadata is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent.respond_to?(:linking_metadata)
+
+ raise "NewRelic::Agent::Tracer.current_span_id is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent::Tracer.respond_to?(:current_span_id)
+
+ raise "NewRelic::Agent::Tracer.current_trace_id is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent::Tracer.respond_to?(:current_trace_id)
+
+ module SemanticLogger
+ module Formatters
+ # Formatter for reporting to NewRelic's Logger
+ #
+ # New Relic's logs do not support custom attributes out of the box, and therefore these
+ # have to be put into a single JSON serialized string under the +message+ key.
+ #
+ # In particular the following fields of the log object are serialized under the +message+
+ # key that's sent to NewRelic:
+ #
+ # * message
+ # * tags
+ # * named_tags
+ # * payload
+ # * metric
+ # * metric_amount
+ # * environment
+ # * application
+ #
+ # == New Relic Attributes not Supported
+ # * thread.id
+ # * class.name
+ # * method.name
+ #
+ # == Reference
+ # * Logging specification
+ # * https://github.com/newrelic/newrelic-exporter-specs/tree/master/logging
+ #
+ # * Metadata APIs
+ # * https://www.rubydoc.info/gems/newrelic_rpm/NewRelic/Agent#linking_metadata-instance_method
+ # * https://www.rubydoc.info/gems/newrelic_rpm/NewRelic/Agent/Tracer#current_trace_id-class_method
+ # * https://www.rubydoc.info/gems/newrelic_rpm/NewRelic/Agent/Tracer#current_span_id-class_method
+ #
+ class NewRelicLogs < Raw
+ def initialize(**args)
+ args.delete(:time_key)
+ args.delete(:time_format)
+
+ super(time_key: :timestamp, time_format: :ms, **args)
+ end
+
+ def call(log, logger)
+ hash = super(log, logger)
+
+ message = {
+ message: hash[:message].to_s,
+ tags: hash[:tags] || [],
+ named_tags: hash[:named_tags] || {},
+
+ **hash.slice(:metric, :metric_amount, :environment, :application, :payload)
+ }
+
+ message.merge!(duration: hash[:duration_ms]) if hash.key?(:duration_ms)
+ message.merge!(duration_human: hash[:duration]) if hash.key?(:duration)
+
+ result = {
+ **new_relic_metadata,
+ message: message.to_json,
+ timestamp: hash[:timestamp].to_i,
+ "log.level": log.level.to_s.upcase,
+ "logger.name": log.name,
+ "thread.name": log.thread_name.to_s
+ }
+
+ if hash[:exception]
+ result.merge!(
+ "error.message": hash[:exception][:message],
+ "error.class": hash[:exception][:name],
+ "error.stack": hash[:exception][:stack_trace].join("\n")
+ )
+ end
+
+ if hash[:file]
+ result.merge!(
+ "file.name": hash[:file],
+ "line.number": hash[:line].to_s
+ )
+ end
+
+ result
+ end
+
+ private
+
+ def new_relic_metadata
+ {
+ "trace.id": NewRelic::Agent::Tracer.current_trace_id,
+ "span.id": NewRelic::Agent::Tracer.current_span_id,
+ **NewRelic::Agent.linking_metadata
+ }.reject { |_k, v| v.nil? }.
+ map { |k, v| [k.to_sym, v] }.to_h
+ end
+ end
+ end
+ end
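
The appender in file 8 uses this formatter by default, so no extra configuration is normally required; passing it explicitly is equivalent. A sketch, assuming the New Relic agent is already initialised in the process:

~~~ruby
SemanticLogger.add_appender(
  appender:  :new_relic_logs,
  formatter: SemanticLogger::Formatters::NewRelicLogs.new   # same as the default
)
~~~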
data/lib/semantic_logger/formatters/raw.rb CHANGED
@@ -86,7 +86,7 @@ module SemanticLogger
 
  # Payload
  def payload
- hash[:payload] = log.payload if log.payload&.respond_to?(:empty?) && !log.payload.empty?
+ hash[:payload] = log.payload if log.payload.respond_to?(:empty?) && !log.payload.empty?
  end
 
  # Exception
data/lib/semantic_logger/formatters/signalfx.rb CHANGED
@@ -82,7 +82,10 @@ module SemanticLogger
  self.log = log
  self.logger = logger
 
- metric; time; value; format_dimensions
+ metric
+ time
+ value
+ format_dimensions
 
  # gauge, counter, or cumulative_counter
  data = {}
@@ -113,7 +116,10 @@ module SemanticLogger
  self.hash = {}
  self.log = log
 
- metric; time; value; format_dimensions
+ metric
+ time
+ value
+ format_dimensions
 
  if log.duration
  gauges = (data[:gauge] ||= [])
data/lib/semantic_logger/formatters.rb CHANGED
@@ -1,16 +1,17 @@
  module SemanticLogger
  module Formatters
- autoload :Base, "semantic_logger/formatters/base"
- autoload :Color, "semantic_logger/formatters/color"
- autoload :Default, "semantic_logger/formatters/default"
- autoload :Json, "semantic_logger/formatters/json"
- autoload :Raw, "semantic_logger/formatters/raw"
- autoload :OneLine, "semantic_logger/formatters/one_line"
- autoload :Signalfx, "semantic_logger/formatters/signalfx"
- autoload :Syslog, "semantic_logger/formatters/syslog"
- autoload :Fluentd, "semantic_logger/formatters/fluentd"
- autoload :Logfmt, "semantic_logger/formatters/logfmt"
- autoload :SyslogCee, "semantic_logger/formatters/syslog_cee"
+ autoload :Base, "semantic_logger/formatters/base"
+ autoload :Color, "semantic_logger/formatters/color"
+ autoload :Default, "semantic_logger/formatters/default"
+ autoload :Json, "semantic_logger/formatters/json"
+ autoload :Raw, "semantic_logger/formatters/raw"
+ autoload :OneLine, "semantic_logger/formatters/one_line"
+ autoload :Signalfx, "semantic_logger/formatters/signalfx"
+ autoload :Syslog, "semantic_logger/formatters/syslog"
+ autoload :Fluentd, "semantic_logger/formatters/fluentd"
+ autoload :Logfmt, "semantic_logger/formatters/logfmt"
+ autoload :SyslogCee, "semantic_logger/formatters/syslog_cee"
+ autoload :NewRelicLogs, "semantic_logger/formatters/new_relic_logs"
 
  # Return formatter that responds to call.
  #
data/lib/semantic_logger/levels.rb CHANGED
@@ -1,35 +1,31 @@
+ require "logger"
+
  module SemanticLogger
  module Levels
  # Logging levels in order of most detailed to most severe
  LEVELS = %i[trace debug info warn error fatal].freeze
 
+ # Map the built-in `Logger` levels to SemanticLogger levels.
+ MAPPED_LEVELS =
+ ::Logger::Severity.constants.each_with_object([]) do |constant, levels|
+ logger_value = ::Logger::Severity.const_get(constant)
+ levels[logger_value] = LEVELS.find_index(constant.downcase.to_sym) || LEVELS.find_index(:error)
+ end.freeze
+
  # Internal method to return the log level as an internal index
  # Also supports mapping the ::Logger levels to SemanticLogger levels
  def self.index(level)
  return if level.nil?
 
- index =
- if level.is_a?(Symbol)
- LEVELS.index(level)
- elsif level.is_a?(String)
- level = level.downcase.to_sym
- LEVELS.index(level)
- elsif level.is_a?(Integer) && defined?(::Logger::Severity)
- # Mapping of Rails and Ruby Logger levels to SemanticLogger levels
- @map_levels ||=
- begin
- levels = []
- ::Logger::Severity.constants.each do |constant|
- levels[::Logger::Severity.const_get(constant)] =
- LEVELS.find_index(constant.downcase.to_sym) || LEVELS.find_index(:error)
- end
- levels
- end
- @map_levels[level]
- end
- raise "Invalid level:#{level.inspect} being requested. Must be one of #{LEVELS.inspect}" unless index
-
- index
+ case level
+ when Symbol
+ LEVELS.index(level)
+ when String
+ LEVELS.index(level.downcase.to_sym)
+ when Integer
+ MAPPED_LEVELS[level]
+ end ||
+ raise(ArgumentError, "Invalid level:#{level.inspect} being requested. Must be one of #{LEVELS.inspect}")
  end
 
  # Returns the symbolic level for the supplied level index
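
With `MAPPED_LEVELS` computed once at load time, `Levels.index` resolves symbols, strings, and standard-library `Logger` integers through a single lookup, and unknown values now raise `ArgumentError` instead of a bare `RuntimeError`. A small sketch of the mapping:

~~~ruby
require "semantic_logger"

SemanticLogger::Levels.index(:info)           # => 2
SemanticLogger::Levels.index("warn")          # => 3
SemanticLogger::Levels.index(::Logger::DEBUG) # => 1 (Ruby Logger's 0 maps to :debug)
SemanticLogger::Levels.index(:verbose)        # raises ArgumentError
~~~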
data/lib/semantic_logger/log.rb CHANGED
@@ -144,7 +144,9 @@ module SemanticLogger
  raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)
 
  message = nil if message == ""
- return payload if payload.key?(:payload)
+ if payload.key?(:payload)
+ return message ? payload.merge(message: message) : payload
+ end
 
  new_payload = {}
  args = {}
@@ -219,11 +221,11 @@ module SemanticLogger
 
  seconds = duration / 1000
  if seconds >= 86_400.0 # 1 day
- "#{(seconds / 86_400).to_i}d #{Time.at(seconds).strftime('%-Hh %-Mm')}"
+ "#{(seconds / 86_400).to_i}d #{Time.at(seconds).utc.strftime('%-Hh %-Mm')}"
  elsif seconds >= 3600.0 # 1 hour
- Time.at(seconds).strftime("%-Hh %-Mm")
+ Time.at(seconds).utc.strftime("%-Hh %-Mm")
  elsif seconds >= 60.0 # 1 minute
- Time.at(seconds).strftime("%-Mm %-Ss")
+ Time.at(seconds).utc.strftime("%-Mm %-Ss")
  elsif seconds >= 1.0 # 1 second
  "#{format('%.3f', seconds)}s"
  else
@@ -248,7 +250,11 @@ module SemanticLogger
 
  # Extract the filename and line number from the last entry in the supplied backtrace
  def extract_file_and_line(stack, short_name = false)
+ return unless stack&.size&.positive?
+
  match = CALLER_REGEXP.match(stack.first)
+ return unless match
+
  [short_name ? File.basename(match[1]) : match[1], match[2].to_i]
  end
 
@@ -256,7 +262,7 @@ module SemanticLogger
  # in either the backtrace or exception
  def file_name_and_line(short_name = false)
  stack = backtrace || exception&.backtrace
- extract_file_and_line(stack, short_name) if stack&.size&.positive?
+ extract_file_and_line(stack, short_name)
  end
 
  # Strip the standard Rails colorizing from the logged message
data/lib/semantic_logger/logger.rb CHANGED
@@ -1,4 +1,3 @@
- require "concurrent"
  module SemanticLogger
  # Logger stores the class name to be used for all log messages so that every
  # log message written by this instance will include the class name
data/lib/semantic_logger/metric/statsd.rb CHANGED
@@ -28,6 +28,7 @@ module SemanticLogger
  # )
  def initialize(url: "udp://localhost:8125")
  @url = url
+ super()
  end
 
  def reopen
data/lib/semantic_logger/semantic_logger.rb CHANGED
@@ -1,4 +1,3 @@
- require "concurrent"
  require "socket"
 
  module SemanticLogger
@@ -517,4 +516,27 @@ module SemanticLogger
  @backtrace_level = :error
  @backtrace_level_index = Levels.index(@backtrace_level)
  @sync = false
+
+ # @formatter:off
+ module Metric
+ autoload :NewRelic, "semantic_logger/metric/new_relic"
+ autoload :Signalfx, "semantic_logger/metric/signalfx"
+ autoload :Statsd, "semantic_logger/metric/statsd"
+ end
+
+ module Reporters
+ autoload :Minitest, "semantic_logger/reporters/minitest"
+ end
+
+ module Test
+ autoload :CaptureLogEvents, "semantic_logger/test/capture_log_events"
+ autoload :Minitest, "semantic_logger/test/minitest"
+ end
+
+ if defined?(JRuby)
+ module JRuby
+ autoload :GarbageCollectionLogger, "semantic_logger/jruby/garbage_collection_logger"
+ end
+ end
+ # @formatter:on
  end
data/lib/semantic_logger/sync_processor.rb CHANGED
@@ -2,26 +2,26 @@ module SemanticLogger
  # The SyncProcessor performs logging in the current thread.
  #
  # Appenders are designed to only be used by one thread at a time, so all calls
- # are mutex protected in case SyncProcessor is being used in a multi-threaded environment.
+ # are monitor protected in case SyncProcessor is being used in a multi-threaded environment.
  class SyncProcessor
  def add(*args, &block)
- @mutex.synchronize { @appenders.add(*args, &block) }
+ @monitor.synchronize { @appenders.add(*args, &block) }
  end
 
  def log(*args, &block)
- @mutex.synchronize { @appenders.log(*args, &block) }
+ @monitor.synchronize { @appenders.log(*args, &block) }
  end
 
  def flush
- @mutex.synchronize { @appenders.flush }
+ @monitor.synchronize { @appenders.flush }
  end
 
  def close
- @mutex.synchronize { @appenders.close }
+ @monitor.synchronize { @appenders.close }
  end
 
  def reopen(*args)
- @mutex.synchronize { @appenders.reopen(*args) }
+ @monitor.synchronize { @appenders.reopen(*args) }
  end
 
  # Allow the internal logger to be overridden from its default of $stderr
@@ -47,7 +47,7 @@ module SemanticLogger
  attr_reader :appenders
 
  def initialize(appenders = nil)
- @mutex = Mutex.new
+ @monitor = Monitor.new
  @appenders = appenders || Appenders.new(self.class.logger.dup)
  end
 
data/lib/semantic_logger/test/capture_log_events.rb CHANGED
@@ -4,30 +4,51 @@ module SemanticLogger
  #
  # Example:
  #
- # class UserTest < ActiveSupport::TestCase
- # describe User do
- # let(:capture_logger) { SemanticLogger::Test::CaptureLogEvents.new }
- # let(:user) { User.new }
+ # class UserTest < ActiveSupport::TestCase
+ # describe User do
+ # let(:logger) { SemanticLogger::Test::CaptureLogEvents.new }
+ # let(:user) { User.new }
  #
- # it "logs message" do
- # user.stub(:logger, capture_logger) do
- # user.enable!
+ # it "logs message" do
+ # user.stub(:logger, logger) do
+ # user.enable!
+ # end
+ # assert log = logger.events.first
+ # assert_equal "Hello World", log.message
+ # assert_equal :info, log.level
  # end
- # assert_equal "Hello World", capture_logger.events.last.message
- # assert_equal :info, capture_logger.events.last.level
  # end
  # end
- # end
  class CaptureLogEvents < SemanticLogger::Subscriber
  attr_accessor :events
 
  # By default collect all log levels, and collect metric only log events.
  def initialize(level: :trace, metrics: true)
  super(level: level, metrics: true)
+ @events = []
  end
 
  def log(log)
- (@events ||= []) << log
+ Logger.call_subscribers(log)
+ @events << log
+ end
+
+ # Supports batching of log events
+ def batch(logs)
+ @events += log
+ end
+
+ def clear
+ @events.clear
+ end
+
+ # Support silencing of log messages
+ def level_index
+ @level_index || SemanticLogger.default_level_index
+ end
+
+ def to_h
+ events.map(&:to_h)
  end
  end
  end
data/lib/semantic_logger/test/minitest.rb CHANGED
@@ -0,0 +1,88 @@
+ module SemanticLogger
+ module Test
+ module Minitest
+ # Returns [Array<SemanticLogger::Log>] the log events from Semantic Logger
+ # captured whilst executing the supplied block.
+ #
+ # Notes:
+ # - All log messages are returned regardless of the global default log level.
+ def semantic_logger_events(deprecated_klass = nil, klass: deprecated_klass, silence: :trace, &block)
+ logger = SemanticLogger::Test::CaptureLogEvents.new
+ if klass
+ klass.stub(:logger, logger, &block)
+ elsif silence
+ SemanticLogger.silence(silence) do
+ SemanticLogger::Logger.stub(:processor, logger, &block)
+ end
+ else
+ SemanticLogger::Logger.stub(:processor, logger, &block)
+ end
+ logger.events
+ end
+
+ # Verify a single log event has all the required attributes.
+ def assert_semantic_logger_event(event, level: nil, name: nil, message: nil, message_includes: nil,
+ payload: nil, payload_includes: nil,
+ exception: nil, exception_includes: nil, backtrace: nil,
+ thread_name: nil, tags: nil, named_tags: nil, context: nil,
+ level_index: nil, duration: nil, time: nil,
+ metric: nil, metric_amount: nil, dimensions: nil)
+ assert event, "No log event occurred"
+
+ assert_semantic_logger_entry(event, :message, message)
+ assert_semantic_logger_entry(event, :name, name)
+ assert_semantic_logger_entry(event, :level, level)
+ assert_semantic_logger_entry(event, :thread_name, thread_name)
+ assert_semantic_logger_entry(event, :tags, tags)
+ assert_semantic_logger_entry(event, :named_tags, named_tags)
+ assert_semantic_logger_entry(event, :context, context)
+ assert_semantic_logger_entry(event, :metric, metric)
+ assert_semantic_logger_entry(event, :metric_amount, metric_amount)
+ assert_semantic_logger_entry(event, :dimensions, dimensions)
+ assert_semantic_logger_entry(event, :level_index, level_index)
+ assert_semantic_logger_entry(event, :duration, duration)
+ assert_semantic_logger_entry(event, :time, time)
+ assert_semantic_logger_entry(event, :exception, exception)
+ assert_semantic_logger_entry(event, :backtrace, backtrace)
+ assert_semantic_logger_entry(event, :payload, payload)
+
+ if message_includes
+ assert_includes(
+ event.message,
+ message_includes,
+ -> { "Expected message to include '#{message_includes}' in log event #{event.inspect}" }
+ )
+ end
+
+ if payload_includes
+ payload_includes.each_pair do |key, expected|
+ actual = event.payload[key]
+ assert_semantic_logger_entry(event, "payload #{name}", expected, actual)
+ end
+ end
+
+ if exception_includes
+ payload_includes.each_pair do |key, expected|
+ actual = event.exception.send(key)
+ assert_semantic_logger_entry(event, "Exception #{name}", expected, actual)
+ end
+ end
+ end
+
+ private
+
+ def assert_semantic_logger_entry(event, name, expected, actual = event.send(name))
+ return if expected.nil?
+
+ case expected
+ when :nil
+ assert_nil actual, "Expected nil #{name} for log event: #{event.to_h.inspect}"
+ when Class
+ assert actual.is_a?(expected), -> { "Type #{expected} expected for #{name} in log event: #{event.to_h.inspect}" }
+ else
+ assert_equal expected, actual, "Mismatched #{name} for log event: #{event.to_h.inspect}"
+ end
+ end
+ end
+ end
+ end
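
A hedged example of the new helpers in a Minitest spec; `User` and its `enable!` method are hypothetical, and the expected message mirrors the CaptureLogEvents example above:

~~~ruby
class UserTest < ActiveSupport::TestCase
  include SemanticLogger::Test::Minitest

  describe User do
    let(:user) { User.new }

    it "logs when enabled" do
      # Capture every event logged anywhere while the block runs.
      events = semantic_logger_events { user.enable! }

      assert_equal 1, events.count
      assert_semantic_logger_event(
        events.first,
        level:            :info,
        message_includes: "Hello World"
      )
    end
  end
end
~~~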
data/lib/semantic_logger/version.rb CHANGED
@@ -1,3 +1,3 @@
  module SemanticLogger
- VERSION = "4.12.0".freeze
+ VERSION = "4.16.0".freeze
  end
data/lib/semantic_logger.rb CHANGED
@@ -1,49 +1,22 @@
+ require "concurrent"
  require "semantic_logger/core_ext/thread"
  require "semantic_logger/version"
-
- # @formatter:off
- module SemanticLogger
- autoload :AnsiColors, "semantic_logger/ansi_colors"
- autoload :Appender, "semantic_logger/appender"
- autoload :Appenders, "semantic_logger/appenders"
- autoload :Base, "semantic_logger/base"
- autoload :DebugAsTraceLogger, "semantic_logger/debug_as_trace_logger"
- autoload :Formatters, "semantic_logger/formatters"
- autoload :Levels, "semantic_logger/levels"
- autoload :Log, "semantic_logger/log"
- autoload :Logger, "semantic_logger/logger"
- autoload :Loggable, "semantic_logger/loggable"
- autoload :Processor, "semantic_logger/processor"
- autoload :Subscriber, "semantic_logger/subscriber"
- autoload :SyncProcessor, "semantic_logger/sync_processor"
- autoload :Utils, "semantic_logger/utils"
-
- module Concerns
- autoload :Compatibility, "semantic_logger/concerns/compatibility"
- end
-
- module Metric
- autoload :NewRelic, "semantic_logger/metric/new_relic"
- autoload :Signalfx, "semantic_logger/metric/signalfx"
- autoload :Statsd, "semantic_logger/metric/statsd"
- end
-
- module Reporters
- autoload :Minitest, "semantic_logger/reporters/minitest"
- end
-
- module Test
- autoload :CaptureLogEvents, "semantic_logger/test/capture_log_events"
- end
-
- if defined?(JRuby)
- module JRuby
- autoload :GarbageCollectionLogger, "semantic_logger/jruby/garbage_collection_logger"
- end
- end
- end
+ require "semantic_logger/utils"
+ require "semantic_logger/ansi_colors"
+ require "semantic_logger/levels"
+ require "semantic_logger/base"
+ require "semantic_logger/formatters"
+ require "semantic_logger/log"
+ require "semantic_logger/subscriber"
+ require "semantic_logger/loggable"
+ require "semantic_logger/concerns/compatibility"
+ require "semantic_logger/appender"
+ require "semantic_logger/appenders"
+ require "semantic_logger/processor"
+ require "semantic_logger/sync_processor"
+ require "semantic_logger/logger"
+ require "semantic_logger/debug_as_trace_logger"
  require "semantic_logger/semantic_logger"
- # @formatter:on
 
  # Flush all appenders at exit, waiting for outstanding messages on the queue
  # to be written first.
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: semantic_logger
  version: !ruby/object:Gem::Version
- version: 4.12.0
+ version: 4.16.0
  platform: ruby
  authors:
  - Reid Morrison
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-10-30 00:00:00.000000000 Z
+ date: 2024-07-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: concurrent-ruby
@@ -44,11 +44,13 @@ files:
  - lib/semantic_logger/appender/file.rb
  - lib/semantic_logger/appender/graylog.rb
  - lib/semantic_logger/appender/honeybadger.rb
+ - lib/semantic_logger/appender/honeybadger_insights.rb
  - lib/semantic_logger/appender/http.rb
  - lib/semantic_logger/appender/io.rb
  - lib/semantic_logger/appender/kafka.rb
  - lib/semantic_logger/appender/mongodb.rb
  - lib/semantic_logger/appender/new_relic.rb
+ - lib/semantic_logger/appender/new_relic_logs.rb
  - lib/semantic_logger/appender/rabbitmq.rb
  - lib/semantic_logger/appender/sentry.rb
  - lib/semantic_logger/appender/sentry_ruby.rb
@@ -70,6 +72,7 @@ files:
  - lib/semantic_logger/formatters/fluentd.rb
  - lib/semantic_logger/formatters/json.rb
  - lib/semantic_logger/formatters/logfmt.rb
+ - lib/semantic_logger/formatters/new_relic_logs.rb
  - lib/semantic_logger/formatters/one_line.rb
  - lib/semantic_logger/formatters/raw.rb
  - lib/semantic_logger/formatters/signalfx.rb
@@ -90,12 +93,17 @@ files:
  - lib/semantic_logger/sync.rb
  - lib/semantic_logger/sync_processor.rb
  - lib/semantic_logger/test/capture_log_events.rb
+ - lib/semantic_logger/test/minitest.rb
  - lib/semantic_logger/utils.rb
  - lib/semantic_logger/version.rb
  homepage: https://logger.rocketjob.io
  licenses:
  - Apache-2.0
- metadata: {}
+ metadata:
+ bug_tracker_uri: https://github.com/reidmorrison/semantic_logger/issues
+ documentation_uri: https://logger.rocketjob.io
+ source_code_uri: https://github.com/reidmorrison/semantic_logger/tree/4.16.0
+ rubygems_mfa_required: 'true'
  post_install_message:
  rdoc_options: []
  require_paths:
@@ -111,7 +119,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.2.33
+ rubygems_version: 3.5.3
  signing_key:
  specification_version: 4
  summary: Feature rich logging framework, and replacement for existing Ruby & Rails