semantic_logger 4.13.0 → 4.16.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 0c00e5f1ab5a36a350afa8a082d367ec78c59b62bcfc29027fa843be48b658d4
-  data.tar.gz: 5d77c0d14230c349b5dd58aa29b8f625ea867f7600665482a724b8f13db44f49
+  metadata.gz: 475a3c63ee87f78516ad0e695879d79af03c5d68a86c129f0c128f0faaa8ad82
+  data.tar.gz: cebcd2653fc6d08090f63070a52147e5e1878c34407c8b1fd987b3959f4b5955
 SHA512:
-  metadata.gz: e8c85cca0bf54d04235b12030c0602cdea04b597fcac1544819f25e1b3c40ae35a0874ed7d8b1286db370f5e4d122f812f2437f3e32c20ab45e443f3a735f641
-  data.tar.gz: 172a1ff0b357792afba013577a326e1e2a9ea1bf3b8e83d01f8e0424e2ae9332053322ccbbda328c3607930a7f9cdec0aa5d05daa5b871fdf75182fc806d731b
+  metadata.gz: 7d988c5776532787067b5f2b82cac36e41e51a2a93db68f5fc2fb8ee5f278d4d5b2462bd78819a166a725289930a7bc128a5753ed5415409a74666408471b052
+  data.tar.gz: 84ea17d324b85502ae1f48c3ffa220762035fb9eaf180057f35e355a11224ad3583524953b14ab8c47fe129a152cf78fe210702d04f75929d161afd4c9d72ecc
data/README.md CHANGED
@@ -1,7 +1,7 @@
 # Semantic Logger
 [![Gem Version](https://img.shields.io/gem/v/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![Build Status](https://github.com/reidmorrison/semantic_logger/workflows/build/badge.svg)](https://github.com/reidmorrison/semantic_logger/actions?query=workflow%3Abuild) [![Downloads](https://img.shields.io/gem/dt/semantic_logger.svg)](https://rubygems.org/gems/semantic_logger) [![License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](http://opensource.org/licenses/Apache-2.0) ![](https://img.shields.io/badge/status-Production%20Ready-blue.svg)
 
-Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.
+Semantic Logger is a feature rich logging framework, and replacement for existing Ruby & Rails loggers.
 
 * https://logger.rocketjob.io/
 
@@ -21,7 +21,7 @@ Logging to the following destinations are all supported "out-of-the-box":
 * NewRelic
 * Splunk
 * MongoDB
-* Honeybadger
+* Honeybadger (exceptions and events)
 * Sentry (both with legacy `sentry-raven` and modern `sentry-ruby` gem)
 * HTTP
 * TCP
@@ -54,6 +54,8 @@ The following gems are only required when their corresponding appenders are bein
 and are therefore not automatically included by this gem:
 - Bugsnag Appender: gem 'bugsnag'
 - MongoDB Appender: gem 'mongo' 1.9.2 or above
+- Honeybadger Appender: gem 'honeybadger'
+- HoneybadgerInsights Appender: gem 'honeybadger'
 - NewRelic Appender: gem 'newrelic_rpm'
 - NewRelicLogs Appender: gem 'newrelic_rpm'
 - Syslog Appender: gem 'syslog_protocol' 0.9.2 or above
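The two Honeybadger entries added above mean the `honeybadger` gem must be supplied by the application itself; a hypothetical Gemfile sketch:

~~~ruby
# Gemfile (illustrative): semantic_logger does not declare honeybadger as a
# dependency, so add it only when either Honeybadger appender is enabled.
gem "semantic_logger"
gem "honeybadger"
~~~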
@@ -129,16 +131,16 @@ logger.debug payload: {foo: 'foo', bar: 'bar'}
 Similarly, for measure blocks:
 
 ~~~ruby
-logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
+logger.measure_info('How long is the sleep', foo: 'foo', bar: 'bar') { sleep 1 }
 ~~~
 
 Must be replaced with the following in v4:
 
 ~~~ruby
-logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
+logger.measure_info('How long is the sleep', payload: {foo: 'foo', bar: 'bar'}) { sleep 1 }
 ~~~
 
-The common log call has not changed, and the payload is still logged directly:
+The common log call has not changed, and the payload is still logged directly:
 
 ~~~ruby
 logger.debug('log this', foo: 'foo', bar: 'bar')
@@ -37,6 +37,7 @@ module SemanticLogger
       #
       #   type: [String]
       #     Document type to associate with logs when they are written.
+      #     Deprecated in Elasticsearch 7.0.0.
       #     Default: 'log'
       #
       #   level: [:trace | :debug | :info | :warn | :error | :fatal]
@@ -146,7 +147,7 @@ module SemanticLogger
         @elasticsearch_args = elasticsearch_args.dup
         @elasticsearch_args[:url] = url if url && !elasticsearch_args[:hosts]
         @elasticsearch_args[:logger] = logger
-        @data_stream = data_stream
+        @data_stream = data_stream
 
         super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: false, &block)
         reopen
@@ -176,11 +177,12 @@ module SemanticLogger
       private
 
       def write_to_elasticsearch(messages)
-        bulk_result = if @data_stream
-          @client.bulk(index: index, body: messages)
-        else
-          @client.bulk(body: messages)
-        end
+        bulk_result =
+          if @data_stream
+            @client.bulk(index: index, body: messages)
+          else
+            @client.bulk(body: messages)
+          end
 
         return unless bulk_result["errors"]
 
@@ -190,22 +192,21 @@ module SemanticLogger
 
       def bulk_index(log)
         expanded_index_name = log.time.strftime("#{index}-#{date_pattern}")
-        if @data_stream
-          {"create" => {}}
-        else
-          {"index" => {"_index" => expanded_index_name, "_type" => type}}
-        end
+        return {"create" => {}} if @data_stream
+
+        bulk_index = {"index" => {"_index" => expanded_index_name}}
+        bulk_index["index"].merge!({"_type" => type}) if version_supports_type?
+        bulk_index
       end
 
       def default_formatter
-        time_key = if @data_stream
-          "@timestamp"
-        else
-          :timestamp
-        end
-
+        time_key = @data_stream ? "@timestamp" : :timestamp
         SemanticLogger::Formatters::Raw.new(time_format: :iso_8601, time_key: time_key)
       end
+
+      def version_supports_type?
+        Gem::Version.new(::Elasticsearch::VERSION) < Gem::Version.new(7)
+      end
     end
   end
 end
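For orientation, the `data_stream` option exercised above switches bulk writes to `create` actions and an `@timestamp` time key. A hedged configuration sketch, where the cluster URL and index name are placeholders:

~~~ruby
SemanticLogger.add_appender(
  appender:    :elasticsearch,
  url:         "http://localhost:9200", # placeholder cluster URL
  index:       "semantic_logger",       # placeholder index / data stream name
  data_stream: true
)
~~~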
@@ -26,6 +26,7 @@ module SemanticLogger
       #
       #   type: [String]
       #     Document type to associate with logs when they are written.
+      #     Deprecated in Elasticsearch 7.0.0
       #     Default: 'log'
       #
       #   level: [:trace | :debug | :info | :warn | :error | :fatal]
@@ -0,0 +1,61 @@
+begin
+  require "honeybadger"
+rescue LoadError
+  raise LoadError, 'Gem honeybadger is required for logging purposes. Please add the gem "honeybadger" to your Gemfile.'
+end
+
+# Send log messages to honeybadger events/insights API
+#
+# Example:
+#   SemanticLogger.add_appender(appender: :honeybadger_insights)
+#
+module SemanticLogger
+  module Appender
+    class HoneybadgerInsights < SemanticLogger::Subscriber
+      # Honeybadger Appender
+      #
+      # Parameters
+      #   level: [:trace | :debug | :info | :warn | :error | :fatal]
+      #     Override the log level for this appender.
+      #     Default: :error
+      #
+      #   formatter: [Object|Proc|Symbol|Hash]
+      #     An instance of a class that implements #call, or a Proc to be used to format
+      #     the output from this appender
+      #     Default: Use the built-in formatter (See: #call)
+      #
+      #   filter: [Regexp|Proc]
+      #     RegExp: Only include log messages where the class name matches the supplied.
+      #     regular expression. All other messages will be ignored.
+      #     Proc: Only include log messages where the supplied Proc returns true
+      #           The Proc must return true or false.
+      #
+      #   host: [String]
+      #     Name of this host to appear in log messages.
+      #     Default: SemanticLogger.host
+      #
+      #   application: [String]
+      #     Name of this application to appear in log messages.
+      #     Default: SemanticLogger.application
+      def initialize(level: :info, **args, &block)
+        super(level: level, **args, &block)
+      end
+
+      # Send log to honeybadger events API
+      def log(log)
+        event = formatter.call(log, self)
+
+        ::Honeybadger.event(event)
+
+        true
+      end
+
+      private
+
+      # Use Raw Formatter by default
+      def default_formatter
+        SemanticLogger::Formatters::Raw.new(time_key: :ts, time_format: :rfc_3339)
+      end
+    end
+  end
+end
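The new appender is registered as `:honeybadger_insights`, as its own comment shows; a minimal usage sketch (the log level, logger name and payload are illustrative):

~~~ruby
SemanticLogger.add_appender(appender: :honeybadger_insights, level: :info)

# Each log event is rendered by the Raw formatter and forwarded via Honeybadger.event.
SemanticLogger["MyApp"].info("user.signup", user_id: 42)
~~~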
@@ -48,6 +48,11 @@ module SemanticLogger
       #   password: [String]
       #     Password for basic Authentication.
       #
+      #   header: [Hash]
+      #     Custom HTTP headers to send with each request.
+      #     Default: {} ( do not send any custom headers)
+      #     Example: {"Authorization" => "Bearer BEARER_TOKEN"}
+      #
       #   compress: [true|false]
       #     Whether to compress the JSON string with GZip.
       #     Default: false
@@ -95,6 +100,7 @@ module SemanticLogger
                      ssl: {},
                      username: nil,
                      password: nil,
+                     header: {},
                      proxy_url: :ENV,
                      open_timeout: 2.0,
                      read_timeout: 1.0,
@@ -118,7 +124,7 @@ module SemanticLogger
           "Content-Type" => "application/json",
           "Connection" => "keep-alive",
           "Keep-Alive" => "300"
-        }
+        }.merge(header)
         @header["Content-Encoding"] = "gzip" if @compress
 
         uri = URI.parse(@url)
@@ -226,7 +232,7 @@ module SemanticLogger
         end
         request.basic_auth(@username, @password) if @username
         response = @http.request(request)
-        if response.code == "200" || response.code == "201"
+        if response.is_a?(Net::HTTPSuccess)
           true
         else
           # Failures are logged to the global semantic logger failsafe logger (Usually stderr or file)
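A sketch of the new `header:` option in practice, following the example given in the comment above; the endpoint URL and bearer token are placeholders:

~~~ruby
SemanticLogger.add_appender(
  appender: :http,
  url:      "https://logs.example.org/ingest",         # placeholder endpoint
  header:   {"Authorization" => "Bearer BEARER_TOKEN"}  # placeholder token
)
~~~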
@@ -60,6 +60,10 @@ module SemanticLogger
       #   Name of this application to appear in log messages.
       #   Default: SemanticLogger.application
       #
+      # metrics: [Boolean]
+      #   Also send metrics only events to rabbitmq.
+      #   Default: true
+      #
       # RabbitMQ Parameters:
       #
       # rabbitmq_host: [String]
@@ -76,13 +80,15 @@ module SemanticLogger
       #   Default: nil
       #
       # more parameters supported by Bunny: http://rubybunny.info/articles/connecting.html
-      def initialize(queue_name: "semantic_logger", rabbitmq_host: nil, metrics: false, **args, &block)
+      def initialize(queue_name: "semantic_logger", rabbitmq_host: nil,
+                     level: nil, formatter: nil, filter: nil, application: nil, environment: nil, host: nil, metrics: true,
+                     **args, &block)
         @queue_name = queue_name
         @rabbitmq_args = args.dup
         @rabbitmq_args[:host] = rabbitmq_host
         @rabbitmq_args[:logger] = logger
 
-        super(level: level, formatter: formatter, filter: filter, application: application, host: host, metrics: metrics, &block)
+        super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
         reopen
       end
 
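Because the `metrics` default flips from `false` to `true` in this hunk, applications that do not want metric-only events published to RabbitMQ can pin the old behaviour explicitly; a hedged sketch with a placeholder host:

~~~ruby
SemanticLogger.add_appender(
  appender:      :rabbitmq,
  queue_name:    "semantic_logger",
  rabbitmq_host: "127.0.0.1", # placeholder host
  metrics:       false        # keep the previous default of skipping metric-only events
)
~~~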
@@ -101,10 +101,10 @@ module SemanticLogger
       # open the handles to resources
       def reopen
         # Connect to splunk. Connect is a synonym for creating a Service by hand and calling login.
-        self.service = Splunk.connect(config)
+        @service = ::Splunk.connect(config)
 
         # The index we are logging to
-        self.service_index = service.indexes[index]
+        @service_index = service.indexes[index]
       end
 
       # Log the message to Splunk
@@ -208,7 +208,7 @@ module SemanticLogger
 
       # Flush is called by the semantic_logger during shutdown.
       def flush
-        @remote_syslog.flush if @remote_syslog&.respond_to?(:flush)
+        @remote_syslog.flush if @remote_syslog.respond_to?(:flush)
       end
 
       # Returns [SemanticLogger::Formatters::Base] default formatter for this Appender depending on the protocal selected
@@ -191,7 +191,7 @@ module SemanticLogger
         Net::TCPClient.logger = logger
         Net::TCPClient.logger.name = "Net::TCPClient"
 
-        super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, &block)
+        super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
         reopen
       end
 
@@ -1,7 +1,6 @@
 # Send log messages to any standard Ruby logging class.
 #
 # Forwards logging call to loggers such as Logger, log4r, etc.
-#
 module SemanticLogger
   module Appender
     class Wrapper < SemanticLogger::Subscriber
@@ -56,15 +55,22 @@ module SemanticLogger
       #  trace entries are mapped to debug since :trace is not supported by the
       #  Ruby or Rails Loggers
      def log(log)
-        @logger.send(log.level == :trace ? :debug : log.level, formatter.call(log, self))
+        level = log.level == :trace ? :debug : log.level
+        @logger.send(level, formatter.call(log, self))
         true
       end
 
       # Flush all pending logs to disk.
-      #  Waits for all sent documents to be writted to disk
+      #  Waits for all queued log messages to be written to disk.
       def flush
         @logger.flush if @logger.respond_to?(:flush)
       end
+
+      # Close underlying log
+      #  Waits for all queued log messages to be written to disk.
+      def close
+        @logger.close if @logger.respond_to?(:close)
+      end
     end
   end
 end
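For context, the Wrapper appender is the subscriber used when an existing Ruby `Logger`-compatible object is handed to Semantic Logger. The sketch below assumes the `logger:` form of `add_appender`; the stdout logger is chosen arbitrarily:

~~~ruby
require "logger"
require "semantic_logger"

# Wrap an existing Ruby Logger; :trace events are forwarded as :debug, and
# #flush / #close are delegated when the wrapped logger responds to them.
SemanticLogger.add_appender(logger: ::Logger.new($stdout))
~~~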
@@ -1,29 +1,30 @@
 module SemanticLogger
   module Appender
     # @formatter:off
-    autoload :Async,             "semantic_logger/appender/async"
-    autoload :AsyncBatch,        "semantic_logger/appender/async_batch"
-    autoload :Bugsnag,           "semantic_logger/appender/bugsnag"
-    autoload :Elasticsearch,     "semantic_logger/appender/elasticsearch"
-    autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
-    autoload :File,              "semantic_logger/appender/file"
-    autoload :Graylog,           "semantic_logger/appender/graylog"
-    autoload :Honeybadger,       "semantic_logger/appender/honeybadger"
-    autoload :IO,                "semantic_logger/appender/io"
-    autoload :Kafka,             "semantic_logger/appender/kafka"
-    autoload :Sentry,            "semantic_logger/appender/sentry"
-    autoload :Http,              "semantic_logger/appender/http"
-    autoload :MongoDB,           "semantic_logger/appender/mongodb"
-    autoload :NewRelic,          "semantic_logger/appender/new_relic"
-    autoload :NewRelicLogs,      "semantic_logger/appender/new_relic_logs"
-    autoload :Rabbitmq,          "semantic_logger/appender/rabbitmq"
-    autoload :Splunk,            "semantic_logger/appender/splunk"
-    autoload :SplunkHttp,        "semantic_logger/appender/splunk_http"
-    autoload :Syslog,            "semantic_logger/appender/syslog"
-    autoload :Tcp,               "semantic_logger/appender/tcp"
-    autoload :Udp,               "semantic_logger/appender/udp"
-    autoload :Wrapper,           "semantic_logger/appender/wrapper"
-    autoload :SentryRuby,        "semantic_logger/appender/sentry_ruby"
+    autoload :Async,               "semantic_logger/appender/async"
+    autoload :AsyncBatch,          "semantic_logger/appender/async_batch"
+    autoload :Bugsnag,             "semantic_logger/appender/bugsnag"
+    autoload :Elasticsearch,       "semantic_logger/appender/elasticsearch"
+    autoload :ElasticsearchHttp,   "semantic_logger/appender/elasticsearch_http"
+    autoload :File,                "semantic_logger/appender/file"
+    autoload :Graylog,             "semantic_logger/appender/graylog"
+    autoload :Honeybadger,         "semantic_logger/appender/honeybadger"
+    autoload :HoneybadgerInsights, "semantic_logger/appender/honeybadger_insights"
+    autoload :IO,                  "semantic_logger/appender/io"
+    autoload :Kafka,               "semantic_logger/appender/kafka"
+    autoload :Sentry,              "semantic_logger/appender/sentry"
+    autoload :Http,                "semantic_logger/appender/http"
+    autoload :MongoDB,             "semantic_logger/appender/mongodb"
+    autoload :NewRelic,            "semantic_logger/appender/new_relic"
+    autoload :NewRelicLogs,        "semantic_logger/appender/new_relic_logs"
+    autoload :Rabbitmq,            "semantic_logger/appender/rabbitmq"
+    autoload :Splunk,              "semantic_logger/appender/splunk"
+    autoload :SplunkHttp,          "semantic_logger/appender/splunk_http"
+    autoload :Syslog,              "semantic_logger/appender/syslog"
+    autoload :Tcp,                 "semantic_logger/appender/tcp"
+    autoload :Udp,                 "semantic_logger/appender/udp"
+    autoload :Wrapper,             "semantic_logger/appender/wrapper"
+    autoload :SentryRuby,          "semantic_logger/appender/sentry_ruby"
     # @formatter:on
 
     # Returns [SemanticLogger::Subscriber] appender for the supplied options
@@ -6,6 +6,7 @@ module SemanticLogger
     def initialize(logger = Processor.logger.dup)
       @logger = logger
       @logger.name = self.class.name
+      super()
     end
 
     def add(**args, &block)
@@ -45,18 +46,21 @@ module SemanticLogger
     end
 
     def close
-      to_a.each do |appender|
+      closed_appenders = []
+      each do |appender|
         logger.trace "Closing appender: #{appender.name}"
-        delete(appender)
         appender.flush
         appender.close
+        closed_appenders << appender
       rescue Exception => e
         logger.error "Failed to close appender: #{appender.name}", e
       end
+      # Delete appenders outside the #each above which prevents changes during iteration.
+      closed_appenders.each { |appender| delete(appender) }
       logger.trace "All appenders closed and removed from appender list"
     end
 
-    # After a fork the appender thread is not running, start it if it is not running.
+    # After a fork reopen each appender.
     def reopen
       each do |appender|
         next unless appender.respond_to?(:reopen)
@@ -342,9 +342,10 @@ module SemanticLogger
       # Add result of block to message or payload if not nil
       if block_given?
         result = yield(log)
-        if result.is_a?(String)
+        case result
+        when String
           log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
-        elsif result.is_a?(Hash)
+        when Hash
           log.assign_hash(result)
         end
       end
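This branch handles the value returned by a logging block: a String is appended to the message, while a Hash is merged into the log event. A small sketch of the block form, where `expensive_lookup` is a hypothetical method:

~~~ruby
# The block only runs when :debug is enabled for this logger, and its
# String result becomes (part of) the logged message.
logger.debug { "Loaded settings: #{expensive_lookup.inspect}" }
~~~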
@@ -3,20 +3,9 @@ module SemanticLogger
   #  This is useful for existing gems / libraries that log too much to debug
   #  when most of the debug logging should be at the trace level
   class DebugAsTraceLogger < Logger
-    def debug(*args, &block)
-      trace(*args, &block)
-    end
-
-    def debug?
-      trace?
-    end
-
-    def measure_debug(*args, &block)
-      measure_trace(*args, &block)
-    end
-
-    def benchmark_debug(*args, &block)
-      measure_trace(*args, &block)
-    end
+    alias debug trace
+    alias debug? trace?
+    alias measure_debug measure_trace
+    alias benchmark_debug benchmark_trace
   end
 end
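A short usage sketch of this class; the name passed to the constructor is arbitrary:

~~~ruby
# Hand a chatty library a logger whose debug calls are recorded at :trace.
noisy = SemanticLogger::DebugAsTraceLogger.new("NoisyLibrary")
noisy.debug("only emitted when :trace is enabled")
~~~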
@@ -31,7 +31,7 @@ module SemanticLogger
       private
 
       def raw_to_logfmt
-        @parsed = @raw.slice(time_key, :level, :name, :message, :duration).merge(tag: "success")
+        @parsed = @raw.slice(time_key, :level, :name, :message, :duration, :duration_ms).merge(tag: "success")
         handle_tags
         handle_payload
         handle_exception
@@ -86,7 +86,7 @@ module SemanticLogger
 
       # Payload
       def payload
-        hash[:payload] = log.payload if log.payload&.respond_to?(:empty?) && !log.payload.empty?
+        hash[:payload] = log.payload if log.payload.respond_to?(:empty?) && !log.payload.empty?
       end
 
       # Exception
@@ -82,7 +82,10 @@ module SemanticLogger
         self.log = log
         self.logger = logger
 
-        metric; time; value; format_dimensions
+        metric
+        time
+        value
+        format_dimensions
 
         # gauge, counter, or cumulative_counter
         data = {}
@@ -113,7 +116,10 @@ module SemanticLogger
         self.hash = {}
         self.log = log
 
-        metric; time; value; format_dimensions
+        metric
+        time
+        value
+        format_dimensions
 
         if log.duration
           gauges = (data[:gauge] ||= [])
@@ -144,7 +144,9 @@ module SemanticLogger
       raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)
 
       message = nil if message == ""
-      return payload if payload.key?(:payload)
+      if payload.key?(:payload)
+        return message ? payload.merge(message: message) : payload
+      end
 
       new_payload = {}
       args = {}
@@ -219,11 +221,11 @@ module SemanticLogger
 
      seconds = duration / 1000
      if seconds >= 86_400.0 # 1 day
-        "#{(seconds / 86_400).to_i}d #{Time.at(seconds).strftime('%-Hh %-Mm')}"
+        "#{(seconds / 86_400).to_i}d #{Time.at(seconds).utc.strftime('%-Hh %-Mm')}"
      elsif seconds >= 3600.0 # 1 hour
-        Time.at(seconds).strftime("%-Hh %-Mm")
+        Time.at(seconds).utc.strftime("%-Hh %-Mm")
      elsif seconds >= 60.0 # 1 minute
-        Time.at(seconds).strftime("%-Mm %-Ss")
+        Time.at(seconds).utc.strftime("%-Mm %-Ss")
      elsif seconds >= 1.0 # 1 second
        "#{format('%.3f', seconds)}s"
      else
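The added `.utc` matters because `Time.at` returns a local-zone time, so formatting an elapsed-seconds value with `%-H`/`%-M` previously absorbed the machine's UTC offset. A quick illustration (behaviour shown for a UTC+2 zone):

~~~ruby
elapsed = 3_900 # 1 hour 5 minutes, in seconds

Time.at(elapsed).strftime("%-Hh %-Mm")     # local zone, e.g. "3h 5m" at UTC+2
Time.at(elapsed).utc.strftime("%-Hh %-Mm") # always "1h 5m"
~~~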
@@ -248,7 +250,11 @@ module SemanticLogger
 
     # Extract the filename and line number from the last entry in the supplied backtrace
     def extract_file_and_line(stack, short_name = false)
+      return unless stack&.size&.positive?
+
       match = CALLER_REGEXP.match(stack.first)
+      return unless match
+
       [short_name ? File.basename(match[1]) : match[1], match[2].to_i]
     end
 
@@ -256,7 +262,7 @@ module SemanticLogger
     # in either the backtrace or exception
     def file_name_and_line(short_name = false)
       stack = backtrace || exception&.backtrace
-      extract_file_and_line(stack, short_name) if stack&.size&.positive?
+      extract_file_and_line(stack, short_name)
     end
 
     # Strip the standard Rails colorizing from the logged message
@@ -28,6 +28,7 @@ module SemanticLogger
       #   )
      def initialize(url: "udp://localhost:8125")
        @url = url
+        super()
      end
 
      def reopen
@@ -2,26 +2,26 @@ module SemanticLogger
   # The SyncProcessor performs logging in the current thread.
   #
   # Appenders are designed to only be used by one thread at a time, so all calls
-  # are mutex protected in case SyncProcessor is being used in a multi-threaded environment.
+  # are monitor protected in case SyncProcessor is being used in a multi-threaded environment.
   class SyncProcessor
     def add(*args, &block)
-      @mutex.synchronize { @appenders.add(*args, &block) }
+      @monitor.synchronize { @appenders.add(*args, &block) }
     end
 
     def log(*args, &block)
-      @mutex.synchronize { @appenders.log(*args, &block) }
+      @monitor.synchronize { @appenders.log(*args, &block) }
     end
 
     def flush
-      @mutex.synchronize { @appenders.flush }
+      @monitor.synchronize { @appenders.flush }
     end
 
     def close
-      @mutex.synchronize { @appenders.close }
+      @monitor.synchronize { @appenders.close }
     end
 
     def reopen(*args)
-      @mutex.synchronize { @appenders.reopen(*args) }
+      @monitor.synchronize { @appenders.reopen(*args) }
     end
 
     # Allow the internal logger to be overridden from its default of $stderr
@@ -47,7 +47,7 @@ module SemanticLogger
     attr_reader :appenders
 
     def initialize(appenders = nil)
-      @mutex = Mutex.new
+      @monitor = Monitor.new
       @appenders = appenders || Appenders.new(self.class.logger.dup)
     end
 
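The move from `Mutex` to `Monitor` makes the locking re-entrant, so a thread that already holds the lock (for example because an appender itself logs while being flushed) can call `synchronize` again instead of raising. A standalone illustration of the difference:

~~~ruby
require "monitor"

monitor = Monitor.new
mutex   = Mutex.new

# Monitor is re-entrant: nested synchronize calls from the same thread succeed.
monitor.synchronize { monitor.synchronize { puts "ok" } }

# Mutex is not: re-locking from the owning thread raises ThreadError.
begin
  mutex.synchronize { mutex.synchronize {} }
rescue ThreadError => e
  puts "Mutex: #{e.message}" # e.g. "deadlock; recursive locking"
end
~~~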
@@ -4,20 +4,21 @@ module SemanticLogger
    #
    # Example:
    #
-    #   class UserTest < ActiveSupport::TestCase
-    #     describe User do
-    #       let(:capture_logger) { SemanticLogger::Test::CaptureLogEvents.new }
-    #       let(:user) { User.new }
+    #   class UserTest < ActiveSupport::TestCase
+    #     describe User do
+    #       let(:logger) { SemanticLogger::Test::CaptureLogEvents.new }
+    #       let(:user) { User.new }
    #
-    #       it "logs message" do
-    #         user.stub(:logger, capture_logger) do
-    #           user.enable!
+    #       it "logs message" do
+    #         user.stub(:logger, logger) do
+    #           user.enable!
+    #         end
+    #         assert log = logger.events.first
+    #         assert_equal "Hello World", log.message
+    #         assert_equal :info, log.level
    #       end
-    #         assert_equal "Hello World", capture_logger.events.last.message
-    #         assert_equal :info, capture_logger.events.last.level
    #     end
    #   end
-    #   end
    class CaptureLogEvents < SemanticLogger::Subscriber
      attr_accessor :events
 
@@ -28,12 +29,27 @@ module SemanticLogger
      end
 
      def log(log)
+        Logger.call_subscribers(log)
        @events << log
      end
 
+      # Supports batching of log events
+      def batch(logs)
+        @events += log
+      end
+
      def clear
        @events.clear
      end
+
+      # Support silencing of log messages
+      def level_index
+        @level_index || SemanticLogger.default_level_index
+      end
+
+      def to_h
+        events.map(&:to_h)
+      end
    end
  end
 end
@@ -3,14 +3,19 @@ module SemanticLogger
   module Minitest
     # Returns [Array<SemanticLogger::Log>] the log events from Semantic Logger
     # captured whilst executing the supplied block.
-    def semantic_logger_events(klass = nil, &block)
+    #
+    # Notes:
+    # - All log messages are returned regardless of the global default log level.
+    def semantic_logger_events(deprecated_klass = nil, klass: deprecated_klass, silence: :trace, &block)
      logger = SemanticLogger::Test::CaptureLogEvents.new
      if klass
        klass.stub(:logger, logger, &block)
-      else
-        SemanticLogger.silence(:trace) do
+      elsif silence
+        SemanticLogger.silence(silence) do
          SemanticLogger::Logger.stub(:processor, logger, &block)
        end
+      else
+        SemanticLogger::Logger.stub(:processor, logger, &block)
      end
      logger.events
    end
@@ -18,35 +23,65 @@ module SemanticLogger
    # Verify a single log event has all the required attributes.
    def assert_semantic_logger_event(event, level: nil, name: nil, message: nil, message_includes: nil,
                                     payload: nil, payload_includes: nil,
+                                     exception: nil, exception_includes: nil, backtrace: nil,
                                     thread_name: nil, tags: nil, named_tags: nil, context: nil,
+                                     level_index: nil, duration: nil, time: nil,
                                     metric: nil, metric_amount: nil, dimensions: nil)
-      msg = message || message_includes || "no message"
-      assert event, "Log event missing for message: '#{msg}'"
-      assert_equal message, event.message if message
-      assert_includes event.message, message_includes if message_includes
-      assert_equal name, event.name, -> { "Mismatched log name for message: '#{msg}'" } if name
-      assert_equal level, event.level, -> { "Mismatched log level for message: '#{msg}'" } if level
+      assert event, "No log event occurred"
+
+      assert_semantic_logger_entry(event, :message, message)
+      assert_semantic_logger_entry(event, :name, name)
+      assert_semantic_logger_entry(event, :level, level)
+      assert_semantic_logger_entry(event, :thread_name, thread_name)
+      assert_semantic_logger_entry(event, :tags, tags)
+      assert_semantic_logger_entry(event, :named_tags, named_tags)
+      assert_semantic_logger_entry(event, :context, context)
+      assert_semantic_logger_entry(event, :metric, metric)
+      assert_semantic_logger_entry(event, :metric_amount, metric_amount)
+      assert_semantic_logger_entry(event, :dimensions, dimensions)
+      assert_semantic_logger_entry(event, :level_index, level_index)
+      assert_semantic_logger_entry(event, :duration, duration)
+      assert_semantic_logger_entry(event, :time, time)
+      assert_semantic_logger_entry(event, :exception, exception)
+      assert_semantic_logger_entry(event, :backtrace, backtrace)
+      assert_semantic_logger_entry(event, :payload, payload)
+
+      if message_includes
+        assert_includes(
+          event.message,
+          message_includes,
+          -> { "Expected message to include '#{message_includes}' in log event #{event.inspect}" }
+        )
+      end
 
      if payload_includes
-        payload_includes.each_pair do |key, expected_value|
-          value = event.payload[key]
-          if expected_value.nil?
-            assert_nil value, -> { "Mismatched key: #{key.inspect} in log payload: #{event.payload} for message: '#{msg}'" }
-          else
-            assert_equal expected_value, value, -> { "Mismatched key: #{key.inspect} in log payload: #{event.payload} for message: '#{msg}'" }
-          end
+        payload_includes.each_pair do |key, expected|
+          actual = event.payload[key]
+          assert_semantic_logger_entry(event, "payload #{name}", expected, actual)
        end
-      elsif payload
-        assert_equal payload, event.payload, -> { "Mismatched log payload: #{event.payload} for message: '#{msg}'" }
      end
 
-      assert_equal thread_name, event.thread_name, -> { "Mismatched thread_name for message: '#{msg}'" } if thread_name
-      assert_equal tags, event.tags, -> { "Mismatched tags for message: '#{msg}'" } if tags
-      assert_equal named_tags, event.named_tags, -> { "Mismatched named_tags for message: '#{msg}'" } if named_tags
-      assert_equal context, event.context, -> { "Mismatched context for message: '#{msg}'" } if context
-      assert_equal metric, event.metric, -> { "Mismatched metric for message: '#{msg}'" } if metric
-      assert_equal metric_amount, event.metric_amount, -> { "Mismatched metric_amount for message: '#{msg}'" } if metric_amount
-      assert_equal dimensions, event.dimensions, -> { "Mismatched dimensions for message: '#{msg}'" } if dimensions
+      if exception_includes
+        payload_includes.each_pair do |key, expected|
+          actual = event.exception.send(key)
+          assert_semantic_logger_entry(event, "Exception #{name}", expected, actual)
+        end
+      end
+    end
+
+    private
+
+    def assert_semantic_logger_entry(event, name, expected, actual = event.send(name))
+      return if expected.nil?
+
+      case expected
+      when :nil
+        assert_nil actual, "Expected nil #{name} for log event: #{event.to_h.inspect}"
+      when Class
+        assert actual.is_a?(expected), -> { "Type #{expected} expected for #{name} in log event: #{event.to_h.inspect}" }
+      else
+        assert_equal expected, actual, "Mismatched #{name} for log event: #{event.to_h.inspect}"
+      end
    end
  end
 end
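Put together, the reworked helpers read roughly like this in a test; `Payment`, its `#charge!` method, and the asserted values are illustrative stand-ins:

~~~ruby
class PaymentTest < Minitest::Test
  include SemanticLogger::Test::Minitest

  def test_logs_the_charge
    events = semantic_logger_events(klass: Payment) { Payment.new.charge! }

    assert_semantic_logger_event(
      events.first,
      level:            :info,
      message_includes: "charged",
      payload_includes: {amount: 100}
    )
  end
end
~~~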
@@ -1,3 +1,3 @@
 module SemanticLogger
-  VERSION = "4.13.0".freeze
+  VERSION = "4.16.1".freeze
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: semantic_logger
 version: !ruby/object:Gem::Version
-  version: 4.13.0
+  version: 4.16.1
 platform: ruby
 authors:
 - Reid Morrison
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-03-26 00:00:00.000000000 Z
+date: 2024-09-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: concurrent-ruby
@@ -44,6 +44,7 @@ files:
 - lib/semantic_logger/appender/file.rb
 - lib/semantic_logger/appender/graylog.rb
 - lib/semantic_logger/appender/honeybadger.rb
+- lib/semantic_logger/appender/honeybadger_insights.rb
 - lib/semantic_logger/appender/http.rb
 - lib/semantic_logger/appender/io.rb
 - lib/semantic_logger/appender/kafka.rb
@@ -98,7 +99,11 @@ files:
 homepage: https://logger.rocketjob.io
 licenses:
 - Apache-2.0
-metadata: {}
+metadata:
+  bug_tracker_uri: https://github.com/reidmorrison/semantic_logger/issues
+  documentation_uri: https://logger.rocketjob.io
+  source_code_uri: https://github.com/reidmorrison/semantic_logger/tree/4.16.1
+  rubygems_mfa_required: 'true'
 post_install_message:
 rdoc_options: []
 require_paths:
@@ -114,7 +119,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.4.9
+rubygems_version: 3.5.3
 signing_key:
 specification_version: 4
 summary: Feature rich logging framework, and replacement for existing Ruby & Rails