semantic_logger 4.6.1 → 4.10.0

Files changed (58):
  1. checksums.yaml +4 -4
  2. data/README.md +50 -20
  3. data/Rakefile +7 -7
  4. data/lib/semantic_logger/appender/async.rb +10 -9
  5. data/lib/semantic_logger/appender/async_batch.rb +7 -3
  6. data/lib/semantic_logger/appender/bugsnag.rb +43 -30
  7. data/lib/semantic_logger/appender/elasticsearch.rb +32 -14
  8. data/lib/semantic_logger/appender/elasticsearch_http.rb +4 -4
  9. data/lib/semantic_logger/appender/file.rb +249 -67
  10. data/lib/semantic_logger/appender/graylog.rb +12 -10
  11. data/lib/semantic_logger/appender/honeybadger.rb +3 -3
  12. data/lib/semantic_logger/appender/http.rb +20 -18
  13. data/lib/semantic_logger/appender/io.rb +68 -0
  14. data/lib/semantic_logger/appender/kafka.rb +9 -5
  15. data/lib/semantic_logger/appender/mongodb.rb +6 -6
  16. data/lib/semantic_logger/appender/new_relic.rb +2 -2
  17. data/lib/semantic_logger/appender/rabbitmq.rb +5 -5
  18. data/lib/semantic_logger/appender/sentry.rb +7 -7
  19. data/lib/semantic_logger/appender/sentry_ruby.rb +138 -0
  20. data/lib/semantic_logger/appender/splunk.rb +6 -5
  21. data/lib/semantic_logger/appender/splunk_http.rb +6 -6
  22. data/lib/semantic_logger/appender/syslog.rb +23 -15
  23. data/lib/semantic_logger/appender/tcp.rb +5 -5
  24. data/lib/semantic_logger/appender/udp.rb +2 -2
  25. data/lib/semantic_logger/appender/wrapper.rb +3 -2
  26. data/lib/semantic_logger/appender.rb +42 -36
  27. data/lib/semantic_logger/appenders.rb +34 -30
  28. data/lib/semantic_logger/base.rb +57 -27
  29. data/lib/semantic_logger/formatters/base.rb +9 -3
  30. data/lib/semantic_logger/formatters/color.rb +12 -8
  31. data/lib/semantic_logger/formatters/default.rb +18 -5
  32. data/lib/semantic_logger/formatters/fluentd.rb +3 -3
  33. data/lib/semantic_logger/formatters/json.rb +1 -1
  34. data/lib/semantic_logger/formatters/logfmt.rb +72 -0
  35. data/lib/semantic_logger/formatters/raw.rb +31 -7
  36. data/lib/semantic_logger/formatters/signalfx.rb +10 -9
  37. data/lib/semantic_logger/formatters/syslog.rb +8 -6
  38. data/lib/semantic_logger/formatters/syslog_cee.rb +8 -6
  39. data/lib/semantic_logger/formatters.rb +12 -13
  40. data/lib/semantic_logger/jruby/garbage_collection_logger.rb +4 -2
  41. data/lib/semantic_logger/levels.rb +9 -7
  42. data/lib/semantic_logger/log.rb +51 -61
  43. data/lib/semantic_logger/loggable.rb +8 -1
  44. data/lib/semantic_logger/logger.rb +19 -11
  45. data/lib/semantic_logger/metric/new_relic.rb +3 -3
  46. data/lib/semantic_logger/metric/signalfx.rb +3 -3
  47. data/lib/semantic_logger/metric/statsd.rb +7 -7
  48. data/lib/semantic_logger/processor.rb +9 -7
  49. data/lib/semantic_logger/reporters/minitest.rb +4 -4
  50. data/lib/semantic_logger/semantic_logger.rb +40 -19
  51. data/lib/semantic_logger/subscriber.rb +16 -5
  52. data/lib/semantic_logger/sync.rb +12 -0
  53. data/lib/semantic_logger/sync_processor.rb +43 -0
  54. data/lib/semantic_logger/test/capture_log_events.rb +34 -0
  55. data/lib/semantic_logger/utils.rb +32 -13
  56. data/lib/semantic_logger/version.rb +1 -1
  57. data/lib/semantic_logger.rb +27 -22
  58. metadata +15 -10

data/lib/semantic_logger/formatters/syslog_cee.rb

@@ -1,13 +1,14 @@
 begin
-  require 'syslog_protocol'
+  require "syslog_protocol"
 rescue LoadError
-  raise LoadError.new('Gem syslog_protocol is required for remote logging using the Syslog protocol. Please add the gem "syslog_protocol" to your Gemfile.')
+  raise LoadError,
+        'Gem syslog_protocol is required for remote logging using the Syslog protocol. Please add the gem "syslog_protocol" to your Gemfile.'
 end
 
 module SemanticLogger
   module Formatters
     class SyslogCee < Raw
-      attr_accessor :level_map, :facility
+      attr_accessor :level_map, :facility, :max_size
 
       # CEE JSON Syslog format
       # Untested prototype code. Based on documentation only.
@@ -23,9 +24,10 @@ module SemanticLogger
       # Example:
       #   # Log via udp to a remote syslog server on host: `server1` and port `8514`, using the CEE format.
       #   SemanticLogger.add_appender(appender: :syslog, formatter: syslog_cee, url: 'udp://server1:8514')
-      def initialize(facility: ::Syslog::LOG_USER, level_map: SemanticLogger::Formatters::Syslog::LevelMap.new)
+      def initialize(facility: ::Syslog::LOG_USER, level_map: SemanticLogger::Formatters::Syslog::LevelMap.new, max_size: Integer)
         @facility  = facility
         @level_map = level_map.is_a?(SemanticLogger::Formatters::Syslog::LevelMap) ? level_map : SemanticLogger::Formatters::Syslog::LevelMap.new(level_map)
+        @max_size  = max_size
         super()
       end
 
@@ -45,11 +47,11 @@ module SemanticLogger
         packet          = SyslogProtocol::Packet.new
         packet.hostname = logger.host
         packet.facility = facility
-        packet.tag      = logger.application.delete(' ')
+        packet.tag      = logger.application.delete(" ")
         packet.content  = message
         packet.time     = log.time
         packet.severity = level_map[log.level]
-        packet.to_s
+        packet.assemble(@max_size)
       end
     end
   end
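
Note: an illustrative way to exercise the new max_size option, combining the appender call from the comment above with the Hash formatter form handled by Formatters.factory further down; the host, port and 1024-byte limit are placeholders, and the syslog_protocol gem must be in the Gemfile:

    require "semantic_logger"

    # CEE JSON over UDP to a remote syslog server, capping each assembled
    # syslog packet at 1024 bytes (placeholder limit).
    SemanticLogger.add_appender(
      appender:  :syslog,
      url:       "udp://server1:8514",
      formatter: {syslog_cee: {max_size: 1024}}
    )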

data/lib/semantic_logger/formatters.rb

@@ -1,16 +1,15 @@
 module SemanticLogger
   module Formatters
-    # @formatter:off
-    autoload :Base, 'semantic_logger/formatters/base'
-    autoload :Color, 'semantic_logger/formatters/color'
-    autoload :Default, 'semantic_logger/formatters/default'
-    autoload :Json, 'semantic_logger/formatters/json'
-    autoload :Raw, 'semantic_logger/formatters/raw'
-    autoload :OneLine, 'semantic_logger/formatters/one_line'
-    autoload :Signalfx, 'semantic_logger/formatters/signalfx'
-    autoload :Syslog, 'semantic_logger/formatters/syslog'
-    autoload :Fluentd, 'semantic_logger/formatters/fluentd'
-    # @formatter:on
+    autoload :Base, "semantic_logger/formatters/base"
+    autoload :Color, "semantic_logger/formatters/color"
+    autoload :Default, "semantic_logger/formatters/default"
+    autoload :Json, "semantic_logger/formatters/json"
+    autoload :Raw, "semantic_logger/formatters/raw"
+    autoload :OneLine, "semantic_logger/formatters/one_line"
+    autoload :Signalfx, "semantic_logger/formatters/signalfx"
+    autoload :Syslog, "semantic_logger/formatters/syslog"
+    autoload :Fluentd, "semantic_logger/formatters/fluentd"
+    autoload :Logfmt, "semantic_logger/formatters/logfmt"
 
     # Return formatter that responds to call.
     #
@@ -22,10 +21,10 @@ module SemanticLogger
     #   - Any object that responds to :call
     def self.factory(formatter)
       if formatter.is_a?(Symbol)
-        SemanticLogger::Utils.constantize_symbol(formatter, 'SemanticLogger::Formatters').new
+        SemanticLogger::Utils.constantize_symbol(formatter, "SemanticLogger::Formatters").new
       elsif formatter.is_a?(Hash) && formatter.size.positive?
         fmt, options = formatter.first
-        SemanticLogger::Utils.constantize_symbol(fmt.to_sym, 'SemanticLogger::Formatters').new(options)
+        SemanticLogger::Utils.constantize_symbol(fmt.to_sym, "SemanticLogger::Formatters").new(**options)
       elsif formatter.respond_to?(:call)
         formatter
       else
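
Note: with the new :logfmt autoload, the formatter can be selected by symbol like the others, and the **options splat means the Hash form now passes options through as keyword arguments. A minimal sketch; the $stdout appender is illustrative and the sample output is approximate:

    require "semantic_logger"

    SemanticLogger.add_appender(io: $stdout, formatter: :logfmt)
    SemanticLogger["Example"].info("hello")
    SemanticLogger.flush
    # Emits key=value ("logfmt") pairs, approximately:
    #   timestamp=... level=info name=Example message=hello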

data/lib/semantic_logger/jruby/garbage_collection_logger.rb

@@ -12,7 +12,9 @@ module SemanticLogger
     # Must leave the method name as-is so that it can be found by Java
     def handleNotification(notification, _)
       # Only care about GARBAGE_COLLECTION_NOTIFICATION notifications
-      return unless notification.get_type == Java::ComSunManagement::GarbageCollectionNotificationInfo::GARBAGE_COLLECTION_NOTIFICATION
+      unless notification.get_type == Java::ComSunManagement::GarbageCollectionNotificationInfo::GARBAGE_COLLECTION_NOTIFICATION
+        return
+      end
 
       info    = Java::ComSunManagement::GarbageCollectionNotificationInfo.from(notification.user_data)
       gc_info = info.gc_info
@@ -20,7 +22,7 @@ module SemanticLogger
 
       return unless duration >= @min_microseconds
 
-      SemanticLogger['GarbageCollector'].measure_warn(
+      SemanticLogger["GarbageCollector"].measure_warn(
        "Garbage Collection completed: #{info.gc_name} ##{gc_info.id}",
        duration: duration.to_f / 1000
      )
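
Note: the measure_warn(message, duration:) form used above also works outside the GC listener whenever a duration measured elsewhere should be attached to a log entry. A small sketch; the logger name and value are illustrative:

    elapsed_ms = 125.0 # measured by some other timer
    SemanticLogger["Housekeeping"].measure_warn(
      "Nightly cleanup completed",
      duration: elapsed_ms
    )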

data/lib/semantic_logger/levels.rb

@@ -16,17 +16,19 @@ module SemanticLogger
         LEVELS.index(level)
       elsif level.is_a?(Integer) && defined?(::Logger::Severity)
         # Mapping of Rails and Ruby Logger levels to SemanticLogger levels
-        @map_levels ||= begin
-          levels = []
-          ::Logger::Severity.constants.each do |constant|
-            levels[::Logger::Severity.const_get(constant)] =
-              LEVELS.find_index(constant.downcase.to_sym) || LEVELS.find_index(:error)
+        @map_levels ||=
+          begin
+            levels = []
+            ::Logger::Severity.constants.each do |constant|
+              levels[::Logger::Severity.const_get(constant)] =
+                LEVELS.find_index(constant.downcase.to_sym) || LEVELS.find_index(:error)
+            end
+            levels
           end
-          levels
-        end
         @map_levels[level]
       end
     raise "Invalid level:#{level.inspect} being requested. Must be one of #{LEVELS.inspect}" unless index
+
     index
   end
 
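
Note: this table is what lets standard-library Logger severities be passed wherever SemanticLogger expects one of its symbolic levels. The enclosing method name is not visible in this hunk, so the Levels.index calls below are an assumption based on the surrounding code:

    require "logger"
    require "semantic_logger"

    SemanticLogger::Levels.index(:warn)          # position of :warn in LEVELS
    SemanticLogger::Levels.index(::Logger::WARN) # same position, resolved via @map_levels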

data/lib/semantic_logger/log.rb

@@ -47,6 +47,12 @@ module SemanticLogger
   #   context [Hash]
   #     Named contexts that were captured when the log entry was created.
   class Log
+    # Keys passed in without a payload that will be extracted and the remainder passed into the payload.
+    NON_PAYLOAD_KEYS = %i[message exception backtrace exception
+                          duration min_duration
+                          log_exception on_exception_level
+                          metric metric_amount dimensions].freeze
+
     attr_accessor :level, :level_index, :name, :message, :time, :duration,
                   :payload, :exception, :thread_name, :backtrace,
                   :tags, :named_tags, :context,
@@ -79,20 +85,13 @@ module SemanticLogger
                log_exception: :full,
                on_exception_level: nil,
                dimensions: nil)
-      # Elastic logging: Log when :duration exceeds :min_duration
-      # Except if there is an exception when it will always be logged
-      if duration
-        self.duration = duration
-        return false if (duration < min_duration) && exception.nil?
-      end
 
-      self.message = message
-      if payload && payload.is_a?(Hash)
-        self.payload = payload
-      elsif payload
-        self.message = message.nil? ? payload.to_s : "#{message} -- #{payload}"
-        self.payload = nil
-      end
+      self.message = message
+      self.payload = payload
+      self.duration = duration
+      self.metric = metric
+      self.metric_amount = metric_amount
+      self.dimensions = dimensions
 
       if exception
         case log_exception
@@ -113,57 +112,43 @@ module SemanticLogger
         end
       end
 
+      # Elastic logging: Log when :duration exceeds :min_duration
+      # Except if there is an exception when it will always be logged
+      return false if duration && ((duration < min_duration) && exception.nil?)
+
       if backtrace
         self.backtrace = Utils.extract_backtrace(backtrace)
       elsif level_index >= SemanticLogger.backtrace_level_index
-        self.backtrace = Utils.extract_backtrace
-      end
-
-      if metric
-        self.metric = metric
-        self.metric_amount = metric_amount
-        self.dimensions = dimensions
+        self.backtrace = Utils.extract_backtrace(caller)
       end
 
       true
     end
 
-    # Assign positional arguments to this log entry, supplying defaults where applicable
-    #
-    # Returns [true|false] whether this log entry should be logged
-    #
-    # Example:
-    #   logger.info('value', :debug, 0, "hello world")
-    def assign_positional(message = nil, payload = nil, exception = nil)
-      # Exception being logged?
-      # Under JRuby a java exception is not a Ruby Exception
-      #   Java::JavaLang::ClassCastException.new.is_a?(Exception) => false
-      if exception.nil? && payload.nil? && message.respond_to?(:backtrace) && message.respond_to?(:message)
-        exception = message
-        message = nil
-      elsif exception.nil? && payload && payload.respond_to?(:backtrace) && payload.respond_to?(:message)
-        exception = payload
-        payload = nil
-      elsif payload && !payload.is_a?(Hash)
-        message = message.nil? ? payload : "#{message} -- #{payload}"
-        payload = nil
-      end
-
-      # Add result of block as message or payload if not nil
-      if block_given? && (result = yield)
-        if result.is_a?(String)
-          message = message.nil? ? result : "#{message} -- #{result}"
-          assign(message: message, payload: payload, exception: exception)
-        elsif message.nil? && result.is_a?(Hash) && %i[message payload exception].any? { |k| result.key? k }
-          assign(result)
-        elsif payload&.respond_to?(:merge)
-          assign(message: message, payload: payload.merge(result), exception: exception)
+    # Assign known keys to self, all other keys to the payload.
+    def assign_hash(hash)
+      self.payload ||= {}
+      hash.each_pair do |key, value|
+        if respond_to?("#{key}=".to_sym)
+          public_send("#{key}=".to_sym, value)
         else
-          assign(message: message, payload: result, exception: exception)
+          payload[key] = value
         end
-      else
-        assign(message: message, payload: payload, exception: exception)
       end
+      self.payload = nil if payload.empty?
+      self
+    end
+
+    # Extract the arguments from a Hash Payload
+    def extract_arguments(payload)
+      raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)
+
+      return payload if payload.key?(:payload)
+
+      args = {}
+      payload.each_key { |key| args[key] = payload.delete(key) if NON_PAYLOAD_KEYS.include?(key) }
+      args[:payload] = payload unless payload.empty?
+      args
     end
 
     MAX_EXCEPTIONS_TO_UNWRAP = 5
@@ -191,7 +176,7 @@ module SemanticLogger
 
     # Returns [String] the exception backtrace including all of the child / caused by exceptions
    def backtrace_to_s
-      trace = ''
+      trace = ""
      each_exception do |exception, i|
        if i.zero?
          trace = (exception.backtrace || []).join("\n")
@@ -212,6 +197,7 @@
    else
      def duration_to_s
        return unless duration
+
        duration < 10.0 ? "#{format('%.3f', duration)}ms" : "#{format('%.1f', duration)}ms"
      end
    end
@@ -219,13 +205,14 @@
    # Returns [String] the duration in human readable form
    def duration_human
      return nil unless duration
+
      seconds = duration / 1000
      if seconds >= 86_400.0 # 1 day
        "#{(seconds / 86_400).to_i}d #{Time.at(seconds).strftime('%-Hh %-Mm')}"
      elsif seconds >= 3600.0 # 1 hour
-        Time.at(seconds).strftime('%-Hh %-Mm')
+        Time.at(seconds).strftime("%-Hh %-Mm")
      elsif seconds >= 60.0 # 1 minute
-        Time.at(seconds).strftime('%-Mm %-Ss')
+        Time.at(seconds).strftime("%-Mm %-Ss")
      elsif seconds >= 1.0 # 1 second
        "#{format('%.3f', seconds)}s"
      else
@@ -238,9 +225,7 @@
      level.to_s[0..0].upcase
    end
 
-    # Returns [String] the available process info
-    # Example:
-    #     18934:thread 23 test_logging.rb:51
+    # DEPRECATED
    def process_info(thread_name_length = 30)
      file, line = file_name_and_line(true)
      file_name = " #{file}:#{line}" if file
@@ -248,7 +233,7 @@
      "#{$$}:#{format("%.#{thread_name_length}s", thread_name)}#{file_name}"
    end
 
-    CALLER_REGEXP = /^(.*):(\d+).*/
+    CALLER_REGEXP = /^(.*):(\d+).*/.freeze
 
    # Extract the filename and line number from the last entry in the supplied backtrace
    def extract_file_and_line(stack, short_name = false)
@@ -265,7 +250,7 @@
 
    # Strip the standard Rails colorizing from the logged message
    def cleansed_message
-      message.to_s.gsub(/(\e(\[([\d;]*[mz]?))?)?/, '').strip
+      message.to_s.gsub(/(\e(\[([\d;]*[mz]?))?)?/, "").strip
    end
 
    # Return the payload in text form
@@ -279,6 +264,11 @@
      !(payload.nil? || (payload.respond_to?(:empty?) && payload.empty?))
    end
 
+    def to_h(host = SemanticLogger.host, application = SemanticLogger.application, environment = SemanticLogger.environment)
+      logger = Struct.new(:host, :application, :environment).new(host, application, environment)
+      SemanticLogger::Formatters::Raw.new.call(self, logger)
+    end
+
    # Lazy initializes the context hash and assigns a key value pair.
    def set_context(key, value)
      (self.context ||= {})[key] = value
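
Note: extract_arguments is what splits a single Hash argument into the known fields in NON_PAYLOAD_KEYS and a residual payload, and the new to_h renders an event through Formatters::Raw without needing an appender. A rough sketch; the Log.new arguments and the commented results are assumptions based on the hunks above, and all values are made up:

    log = SemanticLogger::Log.new("Example", :info)

    args = log.extract_arguments(
      message:  "Order created",
      duration: 12.5,        # recognised via NON_PAYLOAD_KEYS
      metric:   "orders/create",
      order_id: 42,          # unknown keys end up under :payload
      user_id:  7
    )
    # args => {message: "Order created", duration: 12.5, metric: "orders/create",
    #          payload: {order_id: 42, user_id: 7}}

    log.assign(**args)
    log.to_h # Hash rendered by Formatters::Raw (name, level, message, payload, host, application, ...)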

data/lib/semantic_logger/loggable.rb

@@ -8,7 +8,7 @@
 # Example:
 #  require 'semantic_logger'
 #  SemanticLogger.default_level = :debug
-#  SemanticLogger.add_appender(io: STDOUT, formatter: :color)
+#  SemanticLogger.add_appender(io: $stdout, formatter: :color)
 #
 #  class ExternalSupplier
 #    # Create class and instance logger methods
@@ -32,7 +32,14 @@ module SemanticLogger
   module Loggable
     def self.included(base)
       base.extend ClassMethods
+      base.singleton_class.class_eval do
+        undef_method :logger if method_defined?(:logger)
+        undef_method :logger= if method_defined?(:logger=)
+      end
       base.class_eval do
+        undef_method :logger if method_defined?(:logger)
+        undef_method :logger= if method_defined?(:logger=)
+
         # Returns [SemanticLogger::Logger] class level logger
         def self.logger
           @semantic_logger ||= SemanticLogger[self]
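
Note: the undef_method guards above remove any logger/logger= previously defined on the including class (for example by another logging mixin) so that Loggable's own methods take effect. Typical usage, following the example in this file's header comment:

    require "semantic_logger"

    class ExternalSupplier
      # Adds class-level and instance-level `logger` methods
      # backed by SemanticLogger[ExternalSupplier].
      include SemanticLogger::Loggable

      def fetch_rates
        logger.info("Fetching exchange rates")
      end
    end

    ExternalSupplier.logger.warn("Class-level logging also works")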

data/lib/semantic_logger/logger.rb

@@ -1,4 +1,4 @@
-require 'concurrent'
+require "concurrent"
 module SemanticLogger
   # Logger stores the class name to be used for all log messages so that every
   # log message written by this instance will include the class name
@@ -9,7 +9,7 @@ module SemanticLogger
       subscriber = block || object
 
       unless subscriber.is_a?(Proc) || subscriber.respond_to?(:call)
-        raise('When supplying an on_log subscriber, it must support the #call method')
+        raise("When supplying an on_log subscriber, it must support the #call method")
       end
 
       subscribers = (@subscribers ||= Concurrent::Array.new)
@@ -21,7 +21,19 @@ module SemanticLogger
     end
 
     def self.processor
-      @processor
+      @processor ||= Processor.new
+    end
+
+    # Switch to the synchronous processor
+    def self.sync!
+      return if @processor.is_a?(SyncProcessor)
+
+      @processor = SyncProcessor.new(@processor&.appenders)
+    end
+
+    # Running without the background logging thread?
+    def self.sync?
+      processor.is_a?(SyncProcessor)
     end
 
     # Returns a Logger instance
@@ -63,20 +75,16 @@ module SemanticLogger
       Logger.processor.log(log)
     end
 
-    private
-
-    @processor = Processor.new
+    @processor = nil
     @subscribers = nil
 
     def self.call_subscribers(log)
       return unless @subscribers
 
       @subscribers.each do |subscriber|
-        begin
-          subscriber.call(log)
-        rescue Exception => exc
-          self.class.processor.logger.error('Exception calling :on_log subscriber', exc)
-        end
+        subscriber.call(log)
+      rescue Exception => e
+        processor.logger.error("Exception calling :on_log subscriber", e)
      end
    end
  end
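
Note: together with the new sync.rb and sync_processor.rb in the file list, sync!/sync? allow log events to be written on the calling thread instead of the background appender thread. A minimal sketch using the class-level methods shown above (whether a top-level SemanticLogger.sync! shortcut also exists is not visible in these hunks):

    require "semantic_logger"

    SemanticLogger.add_appender(io: $stdout, formatter: :color)

    # Swap in the synchronous processor, keeping the appenders configured above.
    SemanticLogger::Logger.sync!
    SemanticLogger::Logger.sync? # => true

    SemanticLogger["Example"].info("written inline, no background thread")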

data/lib/semantic_logger/metric/new_relic.rb

@@ -1,7 +1,7 @@
 begin
-  require 'newrelic_rpm'
+  require "newrelic_rpm"
 rescue LoadError
-  raise LoadError.new('Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.')
+  raise LoadError, 'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
 end
 
 # Send Metrics to NewRelic
@@ -37,7 +37,7 @@ module SemanticLogger
       #     regular expression. All other messages will be ignored.
       #   Proc: Only include log messages where the supplied Proc returns true
       #     The Proc must return true or false.
-      def initialize(prefix: 'Custom', **args, &block)
+      def initialize(prefix: "Custom", **args, &block)
        @prefix = prefix
        super(**args, &block)
      end

data/lib/semantic_logger/metric/signalfx.rb

@@ -10,7 +10,7 @@ module SemanticLogger
     class Signalfx < SemanticLogger::Appender::Http
       attr_reader :full_url
 
-      END_POINT = 'v2/datapoint'.freeze
+      END_POINT = "v2/datapoint".freeze
 
       # Create SignalFx metrics appender.
       #
@@ -75,7 +75,7 @@ module SemanticLogger
      #   end
      def initialize(token:,
                     dimensions: nil,
-                    url: 'https://ingest.signalfx.com',
+                    url: "https://ingest.signalfx.com",
                     formatter: nil,
                     **args,
                     &block)
@@ -84,7 +84,7 @@ module SemanticLogger
 
        super(url: url, formatter: formatter, **args, &block)
 
-        @header['X-SF-TOKEN'] = token
+        @header["X-SF-TOKEN"] = token
        @full_url = "#{url}/#{END_POINT}"
      end
 

data/lib/semantic_logger/metric/statsd.rb

@@ -1,8 +1,8 @@
-require 'uri'
+require "uri"
 begin
-  require 'statsd-ruby'
+  require "statsd-ruby"
 rescue LoadError
-  raise LoadError.new('Gem statsd-ruby is required for logging metrics. Please add the gem "statsd-ruby" to your Gemfile.')
+  raise LoadError, 'Gem statsd-ruby is required for logging metrics. Please add the gem "statsd-ruby" to your Gemfile.'
 end
 
 module SemanticLogger
@@ -26,17 +26,17 @@ module SemanticLogger
      #     metric: :statsd,
      #     url: 'localhost:8125'
      #   )
-      def initialize(url: 'udp://localhost:8125')
+      def initialize(url: "udp://localhost:8125")
        @url = url
      end
 
      def reopen
        uri = URI.parse(@url)
-        raise('Statsd only supports udp. Example: "udp://localhost:8125"') if uri.scheme != 'udp'
+        raise('Statsd only supports udp. Example: "udp://localhost:8125"') if uri.scheme != "udp"
 
        @statsd = ::Statsd.new(uri.host, uri.port)
-        path = uri.path.chomp('/')
-        @statsd.namespace = path.sub('/', '') if path != ''
+        path = uri.path.chomp("/")
+        @statsd.namespace = path.sub("/", "") if path != ""
      end
 
      def log(log)
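
Note: this subscriber is driven by log entries that carry a :metric key (one of the NON_PAYLOAD_KEYS above). An illustrative setup based on the usage comment in this file; the URL and metric names are placeholders:

    require "semantic_logger"

    # Forward named metrics to a statsd server over UDP.
    SemanticLogger.add_appender(metric: :statsd, url: "udp://localhost:8125")

    logger = SemanticLogger["Orders"]
    logger.info("Order created", metric: "orders/created")
    logger.measure_info("Charge card", metric: "orders/charge") { sleep 0.01 }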

data/lib/semantic_logger/processor.rb

@@ -1,7 +1,7 @@
 module SemanticLogger
   # Thread that submits and processes log requests
   class Processor < Appender::Async
-    # Allow the internal logger to be overridden from its default of STDERR
+    # Allow the internal logger to be overridden from its default of $stderr
     #   Can be replaced with another Ruby logger or Rails logger, but never to
     #   SemanticLogger::Logger itself since it is for reporting problems
     #   while trying to log to the various appenders
@@ -11,13 +11,14 @@ module SemanticLogger
 
     # Internal logger for SemanticLogger
     #   For example when an appender is not working etc..
-    #   By default logs to STDERR
+    #   By default logs to $stderr
     def self.logger
-      @logger ||= begin
-        l = SemanticLogger::Appender::File.new(io: STDERR, level: :warn)
-        l.name = name
-        l
-      end
+      @logger ||=
+        begin
+          l = SemanticLogger::Appender::IO.new($stderr, level: :warn)
+          l.name = name
+          l
+        end
     end
 
     attr_reader :appenders
@@ -30,6 +31,7 @@ module SemanticLogger
     # Start the appender thread
     def start
       return false if active?
+
       thread
       true
     end
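
Note: per the comment at the top of this file, the internal logger can be replaced, for example to report only errors. The writer method below is an assumption (it is not shown in these hunks); the Appender::IO constructor matches the one used in self.logger above:

    require "semantic_logger"

    # Assumption: Processor exposes a logger writer, as its comment implies.
    SemanticLogger::Processor.logger = SemanticLogger::Appender::IO.new($stderr, level: :error)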

data/lib/semantic_logger/reporters/minitest.rb

@@ -23,7 +23,7 @@ module SemanticLogger
     class Minitest < ::Minitest::AbstractReporter
       include SemanticLogger::Loggable
 
-      logger.name = 'Minitest'
+      logger.name = "Minitest"
 
       attr_accessor :io
 
@@ -33,11 +33,11 @@ module SemanticLogger
 
      def after_test(test)
        if test.error?
-          logger.benchmark_error("FAIL #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: 'minitest/fail')
+          logger.benchmark_error("FAIL #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: "minitest/fail")
        elsif test.skipped?
-          logger.benchmark_warn("SKIP #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: 'minitest/skip')
+          logger.benchmark_warn("SKIP #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: "minitest/skip")
        else
-          logger.benchmark_info("PASS #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: 'minitest/pass')
+          logger.benchmark_info("PASS #{test.class_name} #{test.name}", duration: test.time * 1_000, metric: "minitest/pass")
        end
      end
    end