semantic_logger 4.15.0 → 4.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: fcbc97117f68557ed7ffb4a60ccc482f712462436e4c612041d3496458f67826
- data.tar.gz: a847bf4a506663eb3e6c6f428fe53ae7a4f647589d10e129e9d64b0c40ef1ebe
+ metadata.gz: 3d393f5d7fde6627e73b79c370fed2f83e212eb43199bfbebab15b5eeb8ce66b
+ data.tar.gz: c75577f1ad4f5c209cb7fb0028c8d5738e9c546b2df6875bcec40170b1ca1f5e
  SHA512:
- metadata.gz: 4069c6fa8e26e84287ef4a521b83663d6bd8976e61d1793fd6c761c0c7714a1a8a3e5a34c29faf35b7f8e3e425404831c9e44d37d6fe3dca8a7271b791231332
- data.tar.gz: 5deba5054ad0e7e4240d88e705ef0862af3e72afd4219d7ac9ff99cee892f45efbf1de432b1dbadc976270ef19db6958dd0a4d6c4f35bc1203dfc2a4a4f0a356
+ metadata.gz: f86046a5e749f8d69ff17b9c4c9c91ff30507436bfe91f181e0c0a3a8fe1529d0f176940be3c099a9267d5f60e3215610cd3e2f49a2e70d55f88cce7c2345ae9
+ data.tar.gz: 4277d124c94f200d0d6aff5f74f58217cfd48276323b1d47656037dfbcd7d40b21e2ece17b7f50b521098608ce5d08dbd786fcbd53866b8fd052fe17f18db40d
data/README.md CHANGED
@@ -1,7 +1,7 @@
  # Semantic Logger
  [![Gem Version](https://img.shields.io/gem/v/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![Build Status](https://github.com/reidmorrison/semantic_logger/workflows/build/badge.svg)](https://github.com/reidmorrison/semantic_logger/actions?query=workflow%3Abuild) [![Downloads](https://img.shields.io/gem/dt/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](http://opensource.org/licenses/Apache-2.0) ![](https://img.shields.io/badge/status-Production%20Ready-blue.svg)

- Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.
+ Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.

  * https://logger.rocketjob.io/

@@ -21,7 +21,7 @@ Logging to the following destinations are all supported "out-of-the-box":
  * NewRelic
  * Splunk
  * MongoDB
- * Honeybadger
+ * Honeybadger (exceptions and events)
  * Sentry (both with legacy `sentry-raven` and modern `sentry-ruby` gem)
  * HTTP
  * TCP
@@ -54,6 +54,8 @@ The following gems are only required when their corresponding appenders are bein
  and are therefore not automatically included by this gem:
  - Bugsnag Appender: gem 'bugsnag'
  - MongoDB Appender: gem 'mongo' 1.9.2 or above
+ - Honeybadger Appender: gem 'honeybadger'
+ - HoneybadgerInsights Appender: gem 'honeybadger'
  - NewRelic Appender: gem 'newrelic_rpm'
  - NewRelicLogs Appender: gem 'newrelic_rpm'
  - Syslog Appender: gem 'syslog_protocol' 0.9.2 or above
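For orientation, a minimal setup sketch for the Honeybadger appenders listed above. This is illustrative only and not part of the diff; it assumes a plain Ruby app with `gem "honeybadger"` and `gem "semantic_logger"` in the Gemfile. The `:honeybadger_insights` appender name comes from the new appender introduced in this release.

~~~ruby
require "semantic_logger"

# Exceptions go to Honeybadger errors; regular log events go to Honeybadger Insights.
SemanticLogger.add_appender(appender: :honeybadger)
SemanticLogger.add_appender(appender: :honeybadger_insights)
~~~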
@@ -129,16 +131,16 @@ logger.debug payload: {foo: 'foo', bar: 'bar'}
  Similarly, for measure blocks:

  ~~~ruby
- logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
+ logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
  ~~~

  Must be replaced with the following in v4:

  ~~~ruby
- logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
+ logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
  ~~~

- The common log call has not changed, and the payload is still logged directly:
+ The common log call has not changed, and the payload is still logged directly:

  ~~~ruby
  logger.debug('log this', foo: 'foo', bar: 'bar')
@@ -147,7 +147,7 @@ module SemanticLogger
  @elasticsearch_args = elasticsearch_args.dup
  @elasticsearch_args[:url] = url if url && !elasticsearch_args[:hosts]
  @elasticsearch_args[:logger] = logger
- @data_stream = data_stream
+ @data_stream = data_stream

  super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: false, &block)
  reopen
@@ -177,11 +177,12 @@ module SemanticLogger
  private

  def write_to_elasticsearch(messages)
- bulk_result = if @data_stream
- @client.bulk(index: index, body: messages)
- else
- @client.bulk(body: messages)
- end
+ bulk_result =
+ if @data_stream
+ @client.bulk(index: index, body: messages)
+ else
+ @client.bulk(body: messages)
+ end

  return unless bulk_result["errors"]

@@ -191,27 +192,18 @@ module SemanticLogger

  def bulk_index(log)
  expanded_index_name = log.time.strftime("#{index}-#{date_pattern}")
- if @data_stream
- {"create" => {}}
- else
- bulk_index = {"index" => {"_index" => expanded_index_name}}
- bulk_index["index"].merge!({ "_type" => type }) if version_supports_type?
- bulk_index
- end
+ return {"create" => {}} if @data_stream
+
+ bulk_index = {"index" => {"_index" => expanded_index_name}}
+ bulk_index["index"].merge!({"_type" => type}) if version_supports_type?
+ bulk_index
  end

  def default_formatter
- time_key = if @data_stream
- "@timestamp"
- else
- :timestamp
- end
-
+ time_key = @data_stream ? "@timestamp" : :timestamp
  SemanticLogger::Formatters::Raw.new(time_format: :iso_8601, time_key: time_key)
  end

- private
-
  def version_supports_type?
  Gem::Version.new(::Elasticsearch::VERSION) < Gem::Version.new(7)
  end
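The hunks above appear to belong to the Elasticsearch appender: with `data_stream` enabled, bulk writes use the `{"create" => {}}` action and the `@timestamp` time key. A hedged configuration sketch; the `url` and `index` values are placeholders, not values from this release:

~~~ruby
SemanticLogger.add_appender(
  appender:    :elasticsearch,
  url:         "http://localhost:9200", # placeholder
  index:       "semantic_logger-logs",  # placeholder
  data_stream: true                     # route documents into an Elasticsearch data stream
)
~~~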
@@ -0,0 +1,61 @@
+ begin
+ require "honeybadger"
+ rescue LoadError
+ raise LoadError, 'Gem honeybadger is required for logging purposes. Please add the gem "honeybadger" to your Gemfile.'
+ end
+
+ # Send log messages to honeybadger events/insights API
+ #
+ # Example:
+ # SemanticLogger.add_appender(appender: :honeybadger_insights)
+ #
+ module SemanticLogger
+ module Appender
+ class HoneybadgerInsights < SemanticLogger::Subscriber
+ # Honeybadger Appender
+ #
+ # Parameters
+ # level: [:trace | :debug | :info | :warn | :error | :fatal]
+ # Override the log level for this appender.
+ # Default: :error
+ #
+ # formatter: [Object|Proc|Symbol|Hash]
+ # An instance of a class that implements #call, or a Proc to be used to format
+ # the output from this appender
+ # Default: Use the built-in formatter (See: #call)
+ #
+ # filter: [Regexp|Proc]
+ # RegExp: Only include log messages where the class name matches the supplied.
+ # regular expression. All other messages will be ignored.
+ # Proc: Only include log messages where the supplied Proc returns true
+ # The Proc must return true or false.
+ #
+ # host: [String]
+ # Name of this host to appear in log messages.
+ # Default: SemanticLogger.host
+ #
+ # application: [String]
+ # Name of this application to appear in log messages.
+ # Default: SemanticLogger.application
+ def initialize(level: :info, **args, &block)
+ super(level: level, **args, &block)
+ end
+
+ # Send log to honeybadger events API
+ def log(log)
+ event = formatter.call(log, self)
+
+ ::Honeybadger.event(event)
+
+ true
+ end
+
+ private
+
+ # Use Raw Formatter by default
+ def default_formatter
+ SemanticLogger::Formatters::Raw.new(time_key: :ts, time_format: :rfc_3339)
+ end
+ end
+ end
+ end
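A short usage sketch for the new HoneybadgerInsights appender, based on the example in its own header comment; the logger name and payload below are illustrative:

~~~ruby
SemanticLogger.add_appender(appender: :honeybadger_insights, level: :info)

logger = SemanticLogger["Orders"]

# The log is rendered by the Raw formatter (time_key :ts, RFC 3339)
# and forwarded via ::Honeybadger.event.
logger.info("Order placed", order_id: 123, total: 19.99)
~~~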
@@ -60,6 +60,10 @@ module SemanticLogger
  # Name of this application to appear in log messages.
  # Default: SemanticLogger.application
  #
+ # metrics: [Boolean]
+ # Also send metrics only events to rabbitmq.
+ # Default: true
+ #
  # RabbitMQ Parameters:
  #
  # rabbitmq_host: [String]
@@ -76,13 +80,15 @@ module SemanticLogger
  # Default: nil
  #
  # more parameters supported by Bunny: http://rubybunny.info/articles/connecting.html
- def initialize(queue_name: "semantic_logger", rabbitmq_host: nil, metrics: false, **args, &block)
+ def initialize(queue_name: "semantic_logger", rabbitmq_host: nil,
+ level: nil, formatter: nil, filter: nil, application: nil, environment: nil, host: nil, metrics: true,
+ **args, &block)
  @queue_name = queue_name
  @rabbitmq_args = args.dup
  @rabbitmq_args[:host] = rabbitmq_host
  @rabbitmq_args[:logger] = logger

- super(level: level, formatter: formatter, filter: filter, application: application, host: host, metrics: metrics, &block)
+ super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
  reopen
  end
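The RabbitMQ appender now documents `metrics` and defaults it to `true`, and passes `environment` through to the base class. A hedged sketch for opting back out of metric-only events; the host value is a placeholder:

~~~ruby
SemanticLogger.add_appender(
  appender:      :rabbitmq,
  queue_name:    "semantic_logger", # default shown in the code above
  rabbitmq_host: "127.0.0.1",       # placeholder
  metrics:       false              # skip metric-only events
)
~~~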
@@ -101,10 +101,10 @@ module SemanticLogger
  # open the handles to resources
  def reopen
  # Connect to splunk. Connect is a synonym for creating a Service by hand and calling login.
- self.service = Splunk.connect(config)
+ @service = ::Splunk.connect(config)

  # The index we are logging to
- self.service_index = service.indexes[index]
+ @service_index = service.indexes[index]
  end

  # Log the message to Splunk
@@ -208,7 +208,7 @@ module SemanticLogger

  # Flush is called by the semantic_logger during shutdown.
  def flush
- @remote_syslog.flush if @remote_syslog&.respond_to?(:flush)
+ @remote_syslog.flush if @remote_syslog.respond_to?(:flush)
  end

  # Returns [SemanticLogger::Formatters::Base] default formatter for this Appender depending on the protocal selected
@@ -191,7 +191,7 @@ module SemanticLogger
  Net::TCPClient.logger = logger
  Net::TCPClient.logger.name = "Net::TCPClient"

- super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, &block)
+ super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
  reopen
  end

@@ -1,7 +1,6 @@
  # Send log messages to any standard Ruby logging class.
  #
  # Forwards logging call to loggers such as Logger, log4r, etc.
- #
  module SemanticLogger
  module Appender
  class Wrapper < SemanticLogger::Subscriber
@@ -56,15 +55,22 @@ module SemanticLogger
  # trace entries are mapped to debug since :trace is not supported by the
  # Ruby or Rails Loggers
  def log(log)
- @logger.send(log.level == :trace ? :debug : log.level, formatter.call(log, self))
+ level = log.level == :trace ? :debug : log.level
+ @logger.send(level, formatter.call(log, self))
  true
  end

  # Flush all pending logs to disk.
- # Waits for all sent documents to be writted to disk
+ # Waits for all queued log messages to be written to disk.
  def flush
  @logger.flush if @logger.respond_to?(:flush)
  end
+
+ # Close underlying log
+ # Waits for all queued log messages to be written to disk.
+ def close
+ @logger.close if @logger.respond_to?(:close)
+ end
  end
  end
  end
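The Wrapper appender now also closes the wrapped logger when it responds to `#close`. A minimal sketch, assuming a stdlib `Logger` is being wrapped; the file name is illustrative:

~~~ruby
require "logger"
require "semantic_logger"

ruby_logger = Logger.new("application.log") # illustrative destination
SemanticLogger.add_appender(logger: ruby_logger)

SemanticLogger["App"].info("Hello")

# Shutdown now flushes and closes the wrapped Logger as well.
SemanticLogger.flush
SemanticLogger.close
~~~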
@@ -1,29 +1,30 @@
  module SemanticLogger
  module Appender
  # @formatter:off
- autoload :Async, "semantic_logger/appender/async"
- autoload :AsyncBatch, "semantic_logger/appender/async_batch"
- autoload :Bugsnag, "semantic_logger/appender/bugsnag"
- autoload :Elasticsearch, "semantic_logger/appender/elasticsearch"
- autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
- autoload :File, "semantic_logger/appender/file"
- autoload :Graylog, "semantic_logger/appender/graylog"
- autoload :Honeybadger, "semantic_logger/appender/honeybadger"
- autoload :IO, "semantic_logger/appender/io"
- autoload :Kafka, "semantic_logger/appender/kafka"
- autoload :Sentry, "semantic_logger/appender/sentry"
- autoload :Http, "semantic_logger/appender/http"
- autoload :MongoDB, "semantic_logger/appender/mongodb"
- autoload :NewRelic, "semantic_logger/appender/new_relic"
- autoload :NewRelicLogs, "semantic_logger/appender/new_relic_logs"
- autoload :Rabbitmq, "semantic_logger/appender/rabbitmq"
- autoload :Splunk, "semantic_logger/appender/splunk"
- autoload :SplunkHttp, "semantic_logger/appender/splunk_http"
- autoload :Syslog, "semantic_logger/appender/syslog"
- autoload :Tcp, "semantic_logger/appender/tcp"
- autoload :Udp, "semantic_logger/appender/udp"
- autoload :Wrapper, "semantic_logger/appender/wrapper"
- autoload :SentryRuby, "semantic_logger/appender/sentry_ruby"
+ autoload :Async, "semantic_logger/appender/async"
+ autoload :AsyncBatch, "semantic_logger/appender/async_batch"
+ autoload :Bugsnag, "semantic_logger/appender/bugsnag"
+ autoload :Elasticsearch, "semantic_logger/appender/elasticsearch"
+ autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
+ autoload :File, "semantic_logger/appender/file"
+ autoload :Graylog, "semantic_logger/appender/graylog"
+ autoload :Honeybadger, "semantic_logger/appender/honeybadger"
+ autoload :HoneybadgerInsights, "semantic_logger/appender/honeybadger_insights"
+ autoload :IO, "semantic_logger/appender/io"
+ autoload :Kafka, "semantic_logger/appender/kafka"
+ autoload :Sentry, "semantic_logger/appender/sentry"
+ autoload :Http, "semantic_logger/appender/http"
+ autoload :MongoDB, "semantic_logger/appender/mongodb"
+ autoload :NewRelic, "semantic_logger/appender/new_relic"
+ autoload :NewRelicLogs, "semantic_logger/appender/new_relic_logs"
+ autoload :Rabbitmq, "semantic_logger/appender/rabbitmq"
+ autoload :Splunk, "semantic_logger/appender/splunk"
+ autoload :SplunkHttp, "semantic_logger/appender/splunk_http"
+ autoload :Syslog, "semantic_logger/appender/syslog"
+ autoload :Tcp, "semantic_logger/appender/tcp"
+ autoload :Udp, "semantic_logger/appender/udp"
+ autoload :Wrapper, "semantic_logger/appender/wrapper"
+ autoload :SentryRuby, "semantic_logger/appender/sentry_ruby"
  # @formatter:on

  # Returns [SemanticLogger::Subscriber] appender for the supplied options
@@ -6,6 +6,7 @@ module SemanticLogger
  def initialize(logger = Processor.logger.dup)
  @logger = logger
  @logger.name = self.class.name
+ super()
  end

  def add(**args, &block)
@@ -45,18 +46,21 @@ module SemanticLogger
  end

  def close
- to_a.each do |appender|
+ closed_appenders = []
+ each do |appender|
  logger.trace "Closing appender: #{appender.name}"
- delete(appender)
+ closed_appenders << appender
  appender.flush
  appender.close
  rescue Exception => e
  logger.error "Failed to close appender: #{appender.name}", e
  end
+ # Delete appenders outside the #each above which prevents changes during iteration.
+ closed_appenders.each { |appender| delete(appender) }
  logger.trace "All appenders closed and removed from appender list"
  end

- # After a fork the appender thread is not running, start it if it is not running.
+ # After a fork reopen each appender.
  def reopen
  each do |appender|
  next unless appender.respond_to?(:reopen)
@@ -342,9 +342,10 @@ module SemanticLogger
  # Add result of block to message or payload if not nil
  if block_given?
  result = yield(log)
- if result.is_a?(String)
+ case result
+ when String
  log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
- elsif result.is_a?(Hash)
+ when Hash
  log.assign_hash(result)
  end
  end
@@ -3,20 +3,9 @@ module SemanticLogger
  # This is useful for existing gems / libraries that log too much to debug
  # when most of the debug logging should be at the trace level
  class DebugAsTraceLogger < Logger
- def debug(*args, &block)
- trace(*args, &block)
- end
-
- def debug?
- trace?
- end
-
- def measure_debug(*args, &block)
- measure_trace(*args, &block)
- end
-
- def benchmark_debug(*args, &block)
- measure_trace(*args, &block)
- end
+ alias debug trace
+ alias debug? trace?
+ alias measure_debug measure_trace
+ alias benchmark_debug benchmark_trace
  end
  end
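DebugAsTraceLogger now uses `alias` instead of wrapper methods; note that `benchmark_debug` maps to `benchmark_trace` where it previously called `measure_trace`. Usage is unchanged; a hedged sketch with an illustrative logger name:

~~~ruby
# Route a chatty library's :debug calls down to :trace.
logger = SemanticLogger::DebugAsTraceLogger.new("NoisyLibrary")

logger.debug("very verbose detail") # recorded at :trace
logger.info("normal message")       # unchanged
~~~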
@@ -31,7 +31,7 @@ module SemanticLogger
  private

  def raw_to_logfmt
- @parsed = @raw.slice(time_key, :level, :name, :message, :duration).merge(tag: "success")
+ @parsed = @raw.slice(time_key, :level, :name, :message, :duration, :duration_ms).merge(tag: "success")
  handle_tags
  handle_payload
  handle_exception
@@ -86,7 +86,7 @@ module SemanticLogger

  # Payload
  def payload
- hash[:payload] = log.payload if log.payload&.respond_to?(:empty?) && !log.payload.empty?
+ hash[:payload] = log.payload if log.payload.respond_to?(:empty?) && !log.payload.empty?
  end

  # Exception
@@ -82,7 +82,10 @@ module SemanticLogger
  self.log = log
  self.logger = logger

- metric; time; value; format_dimensions
+ metric
+ time
+ value
+ format_dimensions

  # gauge, counter, or cumulative_counter
  data = {}
@@ -113,7 +116,10 @@ module SemanticLogger
  self.hash = {}
  self.log = log

- metric; time; value; format_dimensions
+ metric
+ time
+ value
+ format_dimensions

  if log.duration
  gauges = (data[:gauge] ||= [])
@@ -144,7 +144,9 @@ module SemanticLogger
  raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)

  message = nil if message == ""
- return payload if payload.key?(:payload)
+ if payload.key?(:payload)
+ return message ? payload.merge(message: message) : payload
+ end

  new_payload = {}
  args = {}
@@ -248,7 +250,11 @@ module SemanticLogger

  # Extract the filename and line number from the last entry in the supplied backtrace
  def extract_file_and_line(stack, short_name = false)
+ return unless stack&.size&.positive?
+
  match = CALLER_REGEXP.match(stack.first)
+ return unless match
+
  [short_name ? File.basename(match[1]) : match[1], match[2].to_i]
  end

@@ -256,7 +262,7 @@ module SemanticLogger
  # in either the backtrace or exception
  def file_name_and_line(short_name = false)
  stack = backtrace || exception&.backtrace
- extract_file_and_line(stack, short_name) if stack&.size&.positive?
+ extract_file_and_line(stack, short_name)
  end

  # Strip the standard Rails colorizing from the logged message
@@ -28,6 +28,7 @@ module SemanticLogger
  # )
  def initialize(url: "udp://localhost:8125")
  @url = url
+ super()
  end

  def reopen
@@ -2,26 +2,26 @@ module SemanticLogger
  # The SyncProcessor performs logging in the current thread.
  #
  # Appenders are designed to only be used by one thread at a time, so all calls
- # are mutex protected in case SyncProcessor is being used in a multi-threaded environment.
+ # are monitor protected in case SyncProcessor is being used in a multi-threaded environment.
  class SyncProcessor
  def add(*args, &block)
- @mutex.synchronize { @appenders.add(*args, &block) }
+ @monitor.synchronize { @appenders.add(*args, &block) }
  end

  def log(*args, &block)
- @mutex.synchronize { @appenders.log(*args, &block) }
+ @monitor.synchronize { @appenders.log(*args, &block) }
  end

  def flush
- @mutex.synchronize { @appenders.flush }
+ @monitor.synchronize { @appenders.flush }
  end

  def close
- @mutex.synchronize { @appenders.close }
+ @monitor.synchronize { @appenders.close }
  end

  def reopen(*args)
- @mutex.synchronize { @appenders.reopen(*args) }
+ @monitor.synchronize { @appenders.reopen(*args) }
  end

  # Allow the internal logger to be overridden from its default of $stderr
@@ -47,7 +47,7 @@ module SemanticLogger
  attr_reader :appenders

  def initialize(appenders = nil)
- @mutex = Mutex.new
+ @monitor = Monitor.new
  @appenders = appenders || Appenders.new(self.class.logger.dup)
  end
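Replacing `Mutex` with `Monitor` matters because Ruby's `Monitor` is reentrant: the same thread can call `synchronize` again (for example if an appender logs while it is being flushed) without deadlocking, whereas a `Mutex` raises `ThreadError` on recursive locking. A small stdlib-only illustration:

~~~ruby
require "monitor"

monitor = Monitor.new
monitor.synchronize do
  monitor.synchronize { puts "re-entry is fine with Monitor" }
end

mutex = Mutex.new
mutex.synchronize do
  mutex.synchronize {} # raises ThreadError (recursive locking)
end
~~~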
@@ -4,20 +4,21 @@ module SemanticLogger
  #
  # Example:
  #
- # class UserTest < ActiveSupport::TestCase
- # describe User do
- # let(:capture_logger) { SemanticLogger::Test::CaptureLogEvents.new }
- # let(:user) { User.new }
+ # class UserTest < ActiveSupport::TestCase
+ # describe User do
+ # let(:logger) { SemanticLogger::Test::CaptureLogEvents.new }
+ # let(:user) { User.new }
  #
- # it "logs message" do
- # user.stub(:logger, capture_logger) do
- # user.enable!
+ # it "logs message" do
+ # user.stub(:logger, logger) do
+ # user.enable!
+ # end
+ # assert log = logger.events.first
+ # assert_equal "Hello World", log.message
+ # assert_equal :info, log.level
  # end
- # assert_equal "Hello World", capture_logger.events.last.message
- # assert_equal :info, capture_logger.events.last.level
  # end
  # end
- # end
  class CaptureLogEvents < SemanticLogger::Subscriber
  attr_accessor :events

@@ -28,12 +29,27 @@ module SemanticLogger
  end

  def log(log)
+ Logger.call_subscribers(log)
  @events << log
  end

+ # Supports batching of log events
+ def batch(logs)
+ @events += logs
+ end
+
  def clear
  @events.clear
  end
+
+ # Support silencing of log messages
+ def level_index
+ @level_index || SemanticLogger.default_level_index
+ end
+
+ def to_h
+ events.map(&:to_h)
+ end
  end
  end
  end
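A hedged test sketch exercising the updated CaptureLogEvents, including the new `to_h` helper; the service class and method under test are illustrative:

~~~ruby
logger  = SemanticLogger::Test::CaptureLogEvents.new
service = PaymentService.new # illustrative class under test

service.stub(:logger, logger) do
  service.charge!
end

event = logger.events.first
assert_equal :info, event.level
puts logger.to_h.inspect # new: captured events as an array of hashes
~~~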
@@ -3,14 +3,19 @@ module SemanticLogger
  module Minitest
  # Returns [Array<SemanticLogger::Log>] the log events from Semantic Logger
  # captured whilst executing the supplied block.
- def semantic_logger_events(klass = nil, &block)
+ #
+ # Notes:
+ # - All log messages are returned regardless of the global default log level.
+ def semantic_logger_events(deprecated_klass = nil, klass: deprecated_klass, silence: :trace, &block)
  logger = SemanticLogger::Test::CaptureLogEvents.new
  if klass
  klass.stub(:logger, logger, &block)
- else
- SemanticLogger.silence(:trace) do
+ elsif silence
+ SemanticLogger.silence(silence) do
  SemanticLogger::Logger.stub(:processor, logger, &block)
  end
+ else
+ SemanticLogger::Logger.stub(:processor, logger, &block)
  end
  logger.events
  end
@@ -18,35 +23,65 @@ module SemanticLogger
  # Verify a single log event has all the required attributes.
  def assert_semantic_logger_event(event, level: nil, name: nil, message: nil, message_includes: nil,
  payload: nil, payload_includes: nil,
+ exception: nil, exception_includes: nil, backtrace: nil,
  thread_name: nil, tags: nil, named_tags: nil, context: nil,
+ level_index: nil, duration: nil, time: nil,
  metric: nil, metric_amount: nil, dimensions: nil)
- msg = message || message_includes || "no message"
- assert event, "Log event missing for message: '#{msg}'"
- assert_equal message, event.message if message
- assert_includes event.message, message_includes if message_includes
- assert_equal name, event.name, -> { "Mismatched log name for message: '#{msg}'" } if name
- assert_equal level, event.level, -> { "Mismatched log level for message: '#{msg}'" } if level
+ assert event, "No log event occurred"
+
+ assert_semantic_logger_entry(event, :message, message)
+ assert_semantic_logger_entry(event, :name, name)
+ assert_semantic_logger_entry(event, :level, level)
+ assert_semantic_logger_entry(event, :thread_name, thread_name)
+ assert_semantic_logger_entry(event, :tags, tags)
+ assert_semantic_logger_entry(event, :named_tags, named_tags)
+ assert_semantic_logger_entry(event, :context, context)
+ assert_semantic_logger_entry(event, :metric, metric)
+ assert_semantic_logger_entry(event, :metric_amount, metric_amount)
+ assert_semantic_logger_entry(event, :dimensions, dimensions)
+ assert_semantic_logger_entry(event, :level_index, level_index)
+ assert_semantic_logger_entry(event, :duration, duration)
+ assert_semantic_logger_entry(event, :time, time)
+ assert_semantic_logger_entry(event, :exception, exception)
+ assert_semantic_logger_entry(event, :backtrace, backtrace)
+ assert_semantic_logger_entry(event, :payload, payload)
+
+ if message_includes
+ assert_includes(
+ event.message,
+ message_includes,
+ -> { "Expected message to include '#{message_includes}' in log event #{event.inspect}" }
+ )
+ end

  if payload_includes
- payload_includes.each_pair do |key, expected_value|
- value = event.payload[key]
- if expected_value.nil?
- assert_nil value, -> { "Mismatched key: #{key.inspect} in log payload: #{event.payload} for message: '#{msg}'" }
- else
- assert_equal expected_value, value, -> { "Mismatched key: #{key.inspect} in log payload: #{event.payload} for message: '#{msg}'" }
- end
+ payload_includes.each_pair do |key, expected|
+ actual = event.payload[key]
+ assert_semantic_logger_entry(event, "payload #{name}", expected, actual)
  end
- elsif payload
- assert_equal payload, event.payload, -> { "Mismatched log payload: #{event.payload} for message: '#{msg}'" }
  end

- assert_equal thread_name, event.thread_name, -> { "Mismatched thread_name for message: '#{msg}'" } if thread_name
- assert_equal tags, event.tags, -> { "Mismatched tags for message: '#{msg}'" } if tags
- assert_equal named_tags, event.named_tags, -> { "Mismatched named_tags for message: '#{msg}'" } if named_tags
- assert_equal context, event.context, -> { "Mismatched context for message: '#{msg}'" } if context
- assert_equal metric, event.metric, -> { "Mismatched metric for message: '#{msg}'" } if metric
- assert_equal metric_amount, event.metric_amount, -> { "Mismatched metric_amount for message: '#{msg}'" } if metric_amount
- assert_equal dimensions, event.dimensions, -> { "Mismatched dimensions for message: '#{msg}'" } if dimensions
+ if exception_includes
+ payload_includes.each_pair do |key, expected|
+ actual = event.exception.send(key)
+ assert_semantic_logger_entry(event, "Exception #{name}", expected, actual)
+ end
+ end
+ end
+
+ private
+
+ def assert_semantic_logger_entry(event, name, expected, actual = event.send(name))
+ return if expected.nil?
+
+ case expected
+ when :nil
+ assert_nil actual, "Expected nil #{name} for log event: #{event.to_h.inspect}"
+ when Class
+ assert actual.is_a?(expected), -> { "Type #{expected} expected for #{name} in log event: #{event.to_h.inspect}" }
+ else
+ assert_equal expected, actual, "Mismatched #{name} for log event: #{event.to_h.inspect}"
+ end
  end
  end
  end
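A hedged sketch of the reworked Minitest helpers: `semantic_logger_events` now takes `klass:` and `silence:` keywords, and `assert_semantic_logger_event` gains `exception:`, `backtrace:`, `duration:`, `time:` and friends, where `:nil` asserts the attribute is nil and a Class asserts its type. The test class, message, and payload below are illustrative, and it is assumed the helper module is included in the test case as shown:

~~~ruby
class OrderTest < ActiveSupport::TestCase
  include SemanticLogger::Test::Minitest

  it "logs order creation" do
    events = semantic_logger_events(klass: Order) do
      Order.create!(number: 42)
    end

    assert_equal 1, events.count
    assert_semantic_logger_event(
      events.first,
      level:            :info,
      message_includes: "Created order", # illustrative message
      payload_includes: {number: 42},
      exception:        :nil             # :nil asserts the attribute is nil
    )
  end
end
~~~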
@@ -1,3 +1,3 @@
  module SemanticLogger
- VERSION = "4.15.0".freeze
+ VERSION = "4.16.0".freeze
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: semantic_logger
  version: !ruby/object:Gem::Version
- version: 4.15.0
+ version: 4.16.0
  platform: ruby
  authors:
  - Reid Morrison
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-11-09 00:00:00.000000000 Z
+ date: 2024-07-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: concurrent-ruby
@@ -44,6 +44,7 @@ files:
  - lib/semantic_logger/appender/file.rb
  - lib/semantic_logger/appender/graylog.rb
  - lib/semantic_logger/appender/honeybadger.rb
+ - lib/semantic_logger/appender/honeybadger_insights.rb
  - lib/semantic_logger/appender/http.rb
  - lib/semantic_logger/appender/io.rb
  - lib/semantic_logger/appender/kafka.rb
@@ -101,7 +102,7 @@ licenses:
  metadata:
  bug_tracker_uri: https://github.com/reidmorrison/semantic_logger/issues
  documentation_uri: https://logger.rocketjob.io
- source_code_uri: https://github.com/reidmorrison/semantic_logger/tree/4.15.0
+ source_code_uri: https://github.com/reidmorrison/semantic_logger/tree/4.16.0
  rubygems_mfa_required: 'true'
  post_install_message:
  rdoc_options: []
@@ -118,7 +119,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.9
+ rubygems_version: 3.5.3
  signing_key:
  specification_version: 4
  summary: Feature rich logging framework, and replacement for existing Ruby & Rails