semantic_logger 4.6.1 → 4.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. checksums.yaml +4 -4
  2. data/README.md +50 -20
  3. data/Rakefile +7 -7
  4. data/lib/semantic_logger/appender/async.rb +10 -9
  5. data/lib/semantic_logger/appender/async_batch.rb +7 -3
  6. data/lib/semantic_logger/appender/bugsnag.rb +43 -30
  7. data/lib/semantic_logger/appender/elasticsearch.rb +32 -14
  8. data/lib/semantic_logger/appender/elasticsearch_http.rb +4 -4
  9. data/lib/semantic_logger/appender/file.rb +249 -67
  10. data/lib/semantic_logger/appender/graylog.rb +12 -10
  11. data/lib/semantic_logger/appender/honeybadger.rb +3 -3
  12. data/lib/semantic_logger/appender/http.rb +20 -18
  13. data/lib/semantic_logger/appender/io.rb +68 -0
  14. data/lib/semantic_logger/appender/kafka.rb +9 -5
  15. data/lib/semantic_logger/appender/mongodb.rb +6 -6
  16. data/lib/semantic_logger/appender/new_relic.rb +2 -2
  17. data/lib/semantic_logger/appender/rabbitmq.rb +5 -5
  18. data/lib/semantic_logger/appender/sentry.rb +7 -7
  19. data/lib/semantic_logger/appender/sentry_ruby.rb +138 -0
  20. data/lib/semantic_logger/appender/splunk.rb +6 -5
  21. data/lib/semantic_logger/appender/splunk_http.rb +6 -6
  22. data/lib/semantic_logger/appender/syslog.rb +23 -15
  23. data/lib/semantic_logger/appender/tcp.rb +5 -5
  24. data/lib/semantic_logger/appender/udp.rb +2 -2
  25. data/lib/semantic_logger/appender/wrapper.rb +3 -2
  26. data/lib/semantic_logger/appender.rb +42 -36
  27. data/lib/semantic_logger/appenders.rb +34 -30
  28. data/lib/semantic_logger/base.rb +57 -27
  29. data/lib/semantic_logger/formatters/base.rb +9 -3
  30. data/lib/semantic_logger/formatters/color.rb +12 -8
  31. data/lib/semantic_logger/formatters/default.rb +18 -5
  32. data/lib/semantic_logger/formatters/fluentd.rb +3 -3
  33. data/lib/semantic_logger/formatters/json.rb +1 -1
  34. data/lib/semantic_logger/formatters/logfmt.rb +72 -0
  35. data/lib/semantic_logger/formatters/raw.rb +31 -7
  36. data/lib/semantic_logger/formatters/signalfx.rb +10 -9
  37. data/lib/semantic_logger/formatters/syslog.rb +8 -6
  38. data/lib/semantic_logger/formatters/syslog_cee.rb +8 -6
  39. data/lib/semantic_logger/formatters.rb +12 -13
  40. data/lib/semantic_logger/jruby/garbage_collection_logger.rb +4 -2
  41. data/lib/semantic_logger/levels.rb +9 -7
  42. data/lib/semantic_logger/log.rb +51 -61
  43. data/lib/semantic_logger/loggable.rb +8 -1
  44. data/lib/semantic_logger/logger.rb +19 -11
  45. data/lib/semantic_logger/metric/new_relic.rb +3 -3
  46. data/lib/semantic_logger/metric/signalfx.rb +3 -3
  47. data/lib/semantic_logger/metric/statsd.rb +7 -7
  48. data/lib/semantic_logger/processor.rb +9 -7
  49. data/lib/semantic_logger/reporters/minitest.rb +4 -4
  50. data/lib/semantic_logger/semantic_logger.rb +40 -19
  51. data/lib/semantic_logger/subscriber.rb +16 -5
  52. data/lib/semantic_logger/sync.rb +12 -0
  53. data/lib/semantic_logger/sync_processor.rb +43 -0
  54. data/lib/semantic_logger/test/capture_log_events.rb +34 -0
  55. data/lib/semantic_logger/utils.rb +32 -13
  56. data/lib/semantic_logger/version.rb +1 -1
  57. data/lib/semantic_logger.rb +27 -22
  58. metadata +15 -10
@@ -10,59 +10,63 @@ module SemanticLogger
 
     def add(**args, &block)
       appender = SemanticLogger::Appender.factory(**args, &block)
+
+      if appender.respond_to?(:console_output?) && appender.console_output? && console_output?
+        logger.warn "Ignoring attempt to add a second console appender: #{appender.class.name} since it would result in duplicate console output."
+        return
+      end
+
       self << appender
       appender
     end
 
+    # Whether any of the existing appenders already output to the console?
+    # I.e. Writes to stdout or stderr.
+    def console_output?
+      any? { |appender| appender.respond_to?(:console_output?) && appender.console_output? }
+    end
+
     def log(log)
       each do |appender|
-        begin
-          appender.log(log) if appender.should_log?(log)
-        rescue Exception => exc
-          logger.error "Failed to log to appender: #{appender.name}", exc
-        end
+        appender.log(log) if appender.should_log?(log)
+      rescue Exception => e
+        logger.error "Failed to log to appender: #{appender.name}", e
       end
     end
 
     def flush
       each do |appender|
-        begin
-          logger.trace "Flushing appender: #{appender.name}"
-          appender.flush
-        rescue Exception => exc
-          logger.error "Failed to flush appender: #{appender.name}", exc
-        end
+        logger.trace "Flushing appender: #{appender.name}"
+        appender.flush
+      rescue Exception => e
+        logger.error "Failed to flush appender: #{appender.name}", e
       end
-      logger.trace 'All appenders flushed'
+      logger.trace "All appenders flushed"
     end
 
     def close
-      each do |appender|
-        begin
-          logger.trace "Closing appender: #{appender.name}"
-          appender.flush
-          appender.close
-          delete(appender)
-        rescue Exception => exc
-          logger.error "Failed to close appender: #{appender.name}", exc
-        end
+      to_a.each do |appender|
+        logger.trace "Closing appender: #{appender.name}"
+        delete(appender)
+        appender.flush
+        appender.close
+      rescue Exception => e
+        logger.error "Failed to close appender: #{appender.name}", e
       end
-      logger.trace 'All appenders closed and removed from appender list'
+      logger.trace "All appenders closed and removed from appender list"
     end
 
     # After a fork the appender thread is not running, start it if it is not running.
     def reopen
       each do |appender|
-        begin
-          next unless appender.respond_to?(:reopen)
+        next unless appender.respond_to?(:reopen)
 
-          logger.trace "Reopening appender: #{appender.name}"
-          appender.reopen
-        rescue Exception => exc
-          logger.error "Failed to re-open appender: #{appender.name}", exc
-        end
+        logger.trace "Reopening appender: #{appender.name}"
+        appender.reopen
+      rescue Exception => e
+        logger.error "Failed to re-open appender: #{appender.name}", e
       end
-      logger.trace 'All appenders re-opened'
+      logger.trace "All appenders re-opened"
    end
  end
end
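
The `console_output?` guard added above means only the first console appender is kept; later attempts to add another stdout/stderr appender are warned about and skipped. A minimal sketch of the resulting behaviour, using the standard `SemanticLogger.add_appender` API (the exact appender options are illustrative):

    require "semantic_logger"

    # First console appender is added as usual.
    SemanticLogger.add_appender(io: $stdout, formatter: :color)

    # A second console appender would duplicate every line on the terminal,
    # so it is ignored with a warning instead of being added.
    SemanticLogger.add_appender(io: $stderr)
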
@@ -63,7 +63,7 @@ module SemanticLogger
   #   SemanticLogger.default_level = :info
   #
   #   # Log to screen
-  #   SemanticLogger.add_appender(io: STDOUT, formatter: :color)
+  #   SemanticLogger.add_appender(io: $stdout, formatter: :color)
   #
   #   # And log to a file at the same time
   #   SemanticLogger.add_appender(file_name: 'application.log', formatter: :color)
@@ -126,7 +126,7 @@ module SemanticLogger
     # Log a thread backtrace
     def backtrace(thread: Thread.current,
                   level: :warn,
-                  message: 'Backtrace:',
+                  message: "Backtrace:",
                   payload: nil,
                   metric: nil,
                   metric_amount: nil)
@@ -136,7 +136,7 @@ module SemanticLogger
 
       backtrace =
         if thread == Thread.current
-          Utils.extract_backtrace
+          Utils.extract_backtrace(caller)
         else
           log.thread_name = thread.name
           log.tags = (thread[:semantic_logger_tags] || []).clone
@@ -188,9 +188,11 @@ module SemanticLogger
     # - For better performance with clean tags, see `SemanticLogger.tagged`.
     def tagged(*tags, &block)
       # Allow named tags to be passed into the logger
-      if tags.size == 1
+      # Rails::Rack::Logger passes logs as an array with a single argument
+      if tags.size == 1 && !tags.first.is_a?(Array)
         tag = tags[0]
-        return yield if tag.nil? || tag == ''
+        return yield if tag.nil? || tag == ""
+
         return tag.is_a?(Hash) ? SemanticLogger.named_tagged(tag, &block) : SemanticLogger.fast_tag(tag.to_s, &block)
       end
 
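
The extra `Array` check above exists because `Rails::Rack::Logger` calls `tagged` with a single array argument. A short sketch of the call styles this method now distinguishes (tag values are illustrative):

    logger = SemanticLogger["MyClass"]

    # Single scalar tag: handled by the fast path above.
    logger.tagged("api") { logger.info("inside tagged block") }

    # Single array argument (the Rails::Rack::Logger style): no longer treated
    # as one scalar tag, it falls through to the general multi-tag handling.
    logger.tagged(["api", "v2"]) { logger.info("tagged api and v2") }
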
@@ -240,7 +242,7 @@ module SemanticLogger
 
     # Write log data to underlying data storage
     def log(_log_)
-      raise NotImplementedError, 'Logging Appender must implement #log(log)'
+      raise NotImplementedError, "Logging Appender must implement #log(log)"
     end
 
     # Whether this log entry meets the criteria to be logged by this appender.
@@ -262,14 +264,22 @@ module SemanticLogger
     #     For example if set to :warn, this appender would only log :warn and :fatal
     #     log messages when other appenders could be logging :info and lower
     #
-    #   filter [Regexp|Proc]
+    #   filter [Regexp|Proc|Module]
     #     RegExp: Only include log messages where the class name matches the supplied
     #     regular expression. All other messages will be ignored
     #     Proc: Only include log messages where the supplied Proc returns true
     #     The Proc must return true or false
+    #     Module: A module that implements `.call`. For example:
+    #       module ComplexFilter
+    #         def self.call(log)
+    #           (/\AExclude/ =~ log.message).nil?
+    #         end
+    #       end
     def initialize(klass, level = nil, filter = nil)
-      # Support filtering all messages to this logger using a Regular Expression or Proc
-      raise ':filter must be a Regexp or Proc' unless filter.nil? || filter.is_a?(Regexp) || filter.is_a?(Proc)
+      # Support filtering all messages to this logger instance.
+      unless filter.nil? || filter.is_a?(Regexp) || filter.is_a?(Proc) || filter.respond_to?(:call)
+        raise ":filter must be a Regexp, Proc, or implement :call"
+      end
 
       @filter = filter.is_a?(Regexp) ? filter.freeze : filter
       @name = klass.is_a?(String) ? klass : klass.name
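
Because the constructor now accepts anything that responds to `call`, a filter can be the documented `ComplexFilter` style module, a Proc, or a Regexp. A hedged usage sketch (appender and logger arguments are illustrative):

    # Drop any event whose message starts with "Exclude".
    module ComplexFilter
      def self.call(log)
        (/\AExclude/ =~ log.message).nil?
      end
    end

    # Filter a single appender ...
    SemanticLogger.add_appender(io: $stdout, filter: ComplexFilter)

    # ... or a single logger instance (klass, level, filter).
    logger = SemanticLogger::Logger.new("Payments", :info, ComplexFilter)
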
@@ -302,23 +312,42 @@ module SemanticLogger
     end
 
     # Log message at the specified level
-    def log_internal(level, index, message = nil, payload = nil, exception = nil, &block)
-      log = Log.new(name, level, index)
+    def log_internal(level, index, message = nil, payload = nil, exception = nil)
+      # Handle variable number of arguments by detecting exception object and payload hash.
+      if exception.nil? && payload.nil? && message.respond_to?(:backtrace) && message.respond_to?(:message)
+        exception = message
+        message = nil
+      elsif exception.nil? && payload && payload.respond_to?(:backtrace) && payload.respond_to?(:message)
+        exception = payload
+        payload = nil
+      elsif payload && !payload.is_a?(Hash)
+        message = message.nil? ? payload : "#{message} -- #{payload}"
+        payload = nil
+      end
+
+      log = Log.new(name, level, index)
       should_log =
         if payload.nil? && exception.nil? && message.is_a?(Hash)
-          # Check if someone just logged a hash payload instead of meaning to call semantic logger
-          if message.key?(:message) || message.key?(:payload) || message.key?(:exception) || message.key?(:metric)
-            log.assign(message)
-          else
-            log.assign_positional(nil, message, nil, &block)
-          end
-        elsif exception.nil? && message && payload && payload.is_a?(Hash) &&
-              (payload.key?(:payload) || payload.key?(:exception) || payload.key?(:metric))
-          log.assign(message: message, **payload)
+          # Everything as keyword arguments.
+          log.assign(**log.extract_arguments(message))
+        elsif exception.nil? && message && payload && payload.is_a?(Hash)
+          # Message with keyword arguments as the rest.
+          log.assign(message: message, **log.extract_arguments(payload))
         else
-          log.assign_positional(message, payload, exception, &block)
+          # No keyword arguments.
+          log.assign(message: message, payload: payload, exception: exception)
         end
 
+      # Add result of block to message or payload if not nil
+      if block_given?
+        result = yield(log)
+        if result.is_a?(String)
+          log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
+        elsif result.is_a?(Hash)
+          log.assign_hash(result)
+        end
+      end
+
       # Log level may change during assign due to :on_exception_level
       self.log(log) if should_log && should_log?(log)
     end
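
With the rewritten `log_internal`, a block result is merged into the event after assignment: a String is appended to the message, a Hash is merged via `assign_hash`, and an exception passed positionally is detected by duck typing. A minimal sketch of the call styles this supports (values are illustrative):

    logger = SemanticLogger["Orders"]

    # Block returning a String: only evaluated when :debug is enabled,
    # then appended to the message.
    logger.debug("Calculated totals") { "in #{rand(10)}ms" }

    # Block returning a Hash: merged into the log event.
    logger.info("Order created") { {order_id: 123, total: 45.0} }

    # A non-Hash second argument that quacks like an exception
    # becomes the event's exception.
    begin
      raise ArgumentError, "bad input"
    rescue ArgumentError => e
      logger.warn("Failed to create order", e)
    end
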
@@ -344,18 +373,18 @@ module SemanticLogger
              yield(params)
            end
          end
-      rescue Exception => exc
-        exception = exc
+      rescue Exception => e
+        exception = e
       ensure
         # Must use ensure block otherwise a `return` in the yield above will skip the log entry
-        log = Log.new(name, level, index)
+        log = Log.new(name, level, index)
         exception ||= params[:exception]
         message = params[:message] if params[:message]
         duration =
           if block_given?
             1_000.0 * (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start)
           else
-            params[:duration] || raise('Mandatory block missing when :duration option is not supplied')
+            params[:duration] || raise("Mandatory block missing when :duration option is not supplied")
           end
 
         # Extract options after block completes so that block can modify any of the options
@@ -377,6 +406,7 @@ module SemanticLogger
         # Log level may change during assign due to :on_exception_level
         self.log(log) if should_log && should_log?(log)
         raise exception if exception
+
         result
       end
     end
@@ -395,8 +425,8 @@ module SemanticLogger
       start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
       begin
         yield
-      rescue Exception => exc
-        exception = exc
+      rescue Exception => e
+        exception = e
       ensure
         log = Log.new(name, level, index)
         # May return false due to elastic logging
@@ -1,8 +1,8 @@
-require 'time'
+require "time"
 module SemanticLogger
   module Formatters
     class Base
-      attr_accessor :time_format, :log_host, :log_application, :log_environment, :precision
+      attr_accessor :log, :logger, :time_format, :log_host, :log_application, :log_environment, :precision
 
       # Time precision varies by Ruby interpreter
       # JRuby 9.1.8.0 supports microseconds
@@ -61,10 +61,16 @@ module SemanticLogger
         format_time(log.time) if time_format
       end
 
+      # Process ID
+      def pid
+        $$
+      end
+
       private
 
       # Return the Time as a formatted string
       def format_time(time)
+        time = time.dup
         case time_format
         when :rfc_3339
           time.utc.to_datetime.rfc3339
@@ -77,7 +83,7 @@ module SemanticLogger
         when :seconds
           time.to_f
         when nil
-          ''
+          ""
         else
           time.strftime(time_format)
         end
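
The `format_time` branches above correspond to the `time_format:` option accepted by every formatter that inherits from `Base`. A hedged sketch of selecting a time format when constructing a formatter (values are illustrative):

    # RFC 3339 / ISO 8601 timestamps in JSON output.
    SemanticLogger.add_appender(
      io:        $stdout,
      formatter: SemanticLogger::Formatters::Json.new(time_format: :iso_8601)
    )

    # A custom strftime pattern for the plain-text formatter.
    SemanticLogger.add_appender(
      file_name: "application.log",
      formatter: SemanticLogger::Formatters::Default.new(time_format: "%Y-%m-%d %H:%M:%S.%3N")
    )
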
@@ -1,8 +1,12 @@
-# Load AwesomePrint if available
+# Load Amazing Print, or Awesome Print if available
 begin
-  require 'awesome_print'
+  require "amazing_print"
 rescue LoadError
-  nil
+  begin
+    require "awesome_print"
+  rescue LoadError
+    nil
+  end
 end
 
 module SemanticLogger
@@ -61,9 +65,9 @@ module SemanticLogger
       #
       # Parameters:
       #  ap: [Hash]
-      #    Any valid AwesomePrint option for rendering data.
+      #    Any valid Amazing Print option for rendering data.
       #    These options can also be changed be creating a `~/.aprc` file.
-      #    See: https://github.com/michaeldv/awesome_print
+      #    See: https://github.com/amazing-print/amazing_print
       #
       # Note: The option :multiline is set to false if not supplied.
       # Note: Has no effect if Awesome Print is not installed.
@@ -105,14 +109,14 @@ module SemanticLogger
       def payload
         return unless log.payload?
 
-        if !defined?(AwesomePrint) || !log.payload.respond_to?(:ai)
-          super
-        else
+        if log.payload.respond_to?(:ai)
           begin
             "-- #{log.payload.ai(@ai_options)}"
           rescue StandardError
             super
           end
+        else
+          super
         end
       end
 
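
Pretty payload rendering in the color formatter therefore depends on a `.ai`-capable printer being loadable at require time; Amazing Print is tried first and Awesome Print remains a fallback. Typical Gemfile lines, shown as an assumption of a common setup:

    # Gemfile
    gem "amazing_print"   # preferred, actively maintained
    # gem "awesome_print" # still works as a fallback
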
@@ -2,8 +2,6 @@ module SemanticLogger
   module Formatters
     # Default non-colored text log output
     class Default < Base
-      attr_accessor :log, :logger
-
       # Formatting methods, must return nil, or a string
       # Nil values are ignored
 
@@ -12,9 +10,24 @@ module SemanticLogger
         log.level_to_s
       end
 
-      # Process info
+      # Name of the thread that logged the message.
+      def thread_name
+        format("%.30s", log.thread_name)
+      end
+
+      # Ruby file name and line number that logged the message.
+      def file_name_and_line
+        file, line = log.file_name_and_line(true)
+        "#{file}:#{line}" if file
+      end
+
+      # Returns [String] the available process info
+      # Example:
+      #   [18934:thread_name test_logging.rb:51]
       def process_info
-        "[#{log.process_info}]"
+        process_id = "#{pid}:" if pid
+        fname = file_name_and_line
+        fname ? "[#{process_id}#{thread_name} #{fname}]" : "[#{process_id}#{thread_name}]"
       end
 
       # Tags
@@ -67,7 +80,7 @@ module SemanticLogger
         self.log = log
         self.logger = logger
 
-        [time, level, process_info, tags, named_tags, duration, name, message, payload, exception].compact.join(' ')
+        [time, level, process_info, tags, named_tags, duration, name, message, payload, exception].compact.join(" ")
       end
     end
   end
@@ -1,4 +1,4 @@
-require 'json'
+require "json"
 
 module SemanticLogger
   module Formatters
@@ -13,8 +13,8 @@ module SemanticLogger
       end
 
       def level
-        hash['severity'] = log.level
-        hash['severity_index'] = log.level_index
+        hash["severity"] = log.level
+        hash["severity_index"] = log.level_index
       end
 
       def process_info
@@ -1,4 +1,4 @@
-require 'json'
+require "json"
 module SemanticLogger
   module Formatters
     class Json < Raw
@@ -0,0 +1,72 @@
+require "json"
+
+module SemanticLogger
+  module Formatters
+    # Produces logfmt formatted messages
+    #
+    # The following fields are extracted from the raw log and included in the formatted message:
+    #   :timestamp, :level, :name, :message, :duration, :tags, :named_tags
+    #
+    # E.g.
+    #   timestamp="2020-07-20T08:32:05.375276Z" level=info name="DefaultTest" base="breakfast" spaces="second breakfast" double_quotes="\"elevensies\"" single_quotes="'lunch'" tag="success"
+    #
+    # All timestamps are ISO8601 formatteed
+    # All user supplied values are escaped and surrounded by double quotes to avoid ambiguious message delimeters
+    # `tags` are treated as keys with boolean values. Tag names are not formatted or validated, ensure you use valid logfmt format for tag names.
+    # `named_tags` are flattened are merged into the top level message field. Any conflicting fields are overridden.
+    # `payload` values take precedence over `tags` and `named_tags`. Any conflicting fields are overridden.
+    #
+    # Futher Reading https://brandur.org/logfmt
+    class Logfmt < Raw
+      def initialize(time_format: :iso_8601, time_key: :timestamp, **args)
+        super(time_format: time_format, time_key: time_key, **args)
+      end
+
+      def call(log, logger)
+        @raw = super(log, logger)
+
+        raw_to_logfmt
+      end
+
+      private
+
+      def raw_to_logfmt
+        @parsed = @raw.slice(time_key, :level, :name, :message, :duration).merge(tag: "success")
+        handle_tags
+        handle_payload
+        handle_exception
+
+        flatten_log
+      end
+
+      def handle_tags
+        tags = @raw.fetch(:tags){ [] }
+          .each_with_object({}){ |tag, accum| accum[tag] = true }
+
+        @parsed = @parsed.merge(tags)
+          .merge(@raw.fetch(:named_tags){ {} })
+      end
+
+      def handle_payload
+        return unless @raw.key? :payload
+
+        @parsed = @parsed.merge(@raw[:payload])
+      end
+
+      def handle_exception
+        return unless @raw.key? :exception
+
+        @parsed[:tag] = "exception"
+        @parsed = @parsed.merge(@raw[:exception])
+      end
+
+      def flatten_log
+        flattened = @parsed.map do |key, value|
+          "#{key}=#{value.to_json}"
+        end
+
+        flattened.join(" ")
+      end
+    end
+  end
+end
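
To emit the new logfmt output, the formatter can be selected by name or constructed directly. A hedged usage sketch (assuming `:logfmt` is registered in `formatters.rb`, which this release also touches):

    # Select by symbol on any appender.
    SemanticLogger.add_appender(io: $stdout, formatter: :logfmt)

    # Or construct it directly, e.g. to rename the timestamp key.
    SemanticLogger.add_appender(
      file_name: "application.log",
      formatter: SemanticLogger::Formatters::Logfmt.new(time_key: :ts)
    )

    logger = SemanticLogger["DefaultTest"]
    logger.info("breakfast time", spaces: "second breakfast")
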
@@ -1,9 +1,9 @@
-require 'json'
+require "json"
 module SemanticLogger
   module Formatters
     class Raw < Base
       # Fields are added by populating this hash.
-      attr_accessor :hash, :log, :logger, :time_key
+      attr_accessor :hash, :time_key
 
       # By default Raw formatter does not reformat the time
       def initialize(time_format: :none, time_key: :time, **args)
@@ -37,11 +37,18 @@ module SemanticLogger
         hash[:level_index] = log.level_index
       end
 
-      # Process info
-      def process_info
-        hash[:pid] = $$
+      # Process ID
+      def pid
+        hash[:pid] = super
+      end
+
+      # Name of the thread that logged the message.
+      def thread_name
         hash[:thread] = log.thread_name
+      end
 
+      # Ruby file name and line number that logged the message.
+      def file_name_and_line
         file, line = log.file_name_and_line
         return unless file
 
@@ -85,6 +92,7 @@ module SemanticLogger
       # Exception
       def exception
         return unless log.exception
+
         root = hash
         log.each_exception do |exception, i|
           name = i.zero? ? :exception : :cause
@@ -93,7 +101,7 @@ module SemanticLogger
             message: exception.message,
             stack_trace: exception.backtrace
           }
-          root = root[name]
+          root = root[name]
         end
       end
 
@@ -109,7 +117,23 @@ module SemanticLogger
         self.log = log
         self.logger = logger
 
-        host; application; environment; time; level; process_info; duration; tags; named_tags; name; message; payload; exception; metric
+        host
+        application
+        environment
+        time
+        level
+        pid
+        thread_name
+        file_name_and_line
+        duration
+        tags
+        named_tags
+        name
+        message
+        payload
+        exception
+        metric
+
         hash
       end
     end
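
Splitting the old one-line chain into individual field methods makes it easier for subclasses to add, drop, or rename a single field by overriding one method. A hedged sketch of a custom formatter built on this structure (the class and field names are hypothetical):

    class CompactRaw < SemanticLogger::Formatters::Raw
      # Drop the pid field entirely.
      def pid
      end

      # Rename the thread field.
      def thread_name
        hash[:thread_id] = log.thread_name
      end
    end

    SemanticLogger.add_appender(io: $stdout, formatter: CompactRaw.new)
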
@@ -1,13 +1,13 @@
-require 'json'
+require "json"
 module SemanticLogger
   module Formatters
     class Signalfx < Base
-      attr_accessor :token, :dimensions, :hash, :log, :logger, :gauge_name, :counter_name
+      attr_accessor :token, :dimensions, :hash, :gauge_name, :counter_name
 
       def initialize(token:,
                      dimensions: nil,
-                     gauge_name: 'Application.average',
-                     counter_name: 'Application.counter',
+                     gauge_name: "Application.average",
+                     counter_name: "Application.counter",
                      time_format: :ms,
                      **args)
 
@@ -23,19 +23,19 @@ module SemanticLogger
       # Strip leading '/'
       # Convert remaining '/' to '.'
       def metric
-        name = log.metric.to_s.sub(/\A\/+/, '')
+        name = log.metric.to_s.sub(%r{\A/+}, "")
         if log.dimensions
-          name.tr!('/', '.')
+          name.tr!("/", ".")
           hash[:metric] = name
         else
           # Extract class and action from metric name
-          names = name.split('/')
+          names = name.split("/")
           h = (hash[:dimensions] ||= {})
           if names.size > 1
             h[:action] = names.pop
-            h[:class] = names.join('::')
+            h[:class] = names.join("::")
           else
-            h[:class] = 'Unknown'
+            h[:class] = "Unknown"
             h[:action] = names.first || log.metric
           end
 
@@ -67,6 +67,7 @@ module SemanticLogger
           name = name.to_sym
           value = value.to_s
           next if value.empty?
+
           h[name] = value if dimensions&.include?(name)
         end
       end
@@ -1,13 +1,14 @@
 begin
-  require 'syslog_protocol'
+  require "syslog_protocol"
 rescue LoadError
-  raise LoadError.new('Gem syslog_protocol is required for remote logging using the Syslog protocol. Please add the gem "syslog_protocol" to your Gemfile.')
+  raise LoadError,
+        'Gem syslog_protocol is required for remote logging using the Syslog protocol. Please add the gem "syslog_protocol" to your Gemfile.'
 end
 
 module SemanticLogger
   module Formatters
     class Syslog < Default
-      attr_accessor :level_map, :facility
+      attr_accessor :level_map, :facility, :max_size
 
       # Default level map for every log level
       #
@@ -50,9 +51,10 @@ module SemanticLogger
       # Example:
       #   # Change the warn level to LOG_NOTICE level instead of a the default of LOG_WARNING.
       #   SemanticLogger.add_appender(appender: :syslog, level_map: {warn: ::Syslog::LOG_NOTICE})
-      def initialize(facility: ::Syslog::LOG_USER, level_map: LevelMap.new)
+      def initialize(facility: ::Syslog::LOG_USER, level_map: LevelMap.new, max_size: Integer)
         @facility = facility
         @level_map = level_map.is_a?(LevelMap) ? level_map : LevelMap.new(level_map)
+        @max_size = max_size
         super()
       end
 
@@ -73,11 +75,11 @@ module SemanticLogger
         packet = SyslogProtocol::Packet.new
         packet.hostname = logger.host
         packet.facility = facility
-        packet.tag = logger.application.delete(' ')
+        packet.tag = logger.application.delete(" ")
         packet.content = message
         packet.time = log.time
         packet.severity = level_map[log.level]
-        packet.to_s
+        packet.assemble(@max_size)
       end
     end
   end
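
The formatter now assembles the packet with an explicit maximum size rather than calling `to_s`. When the syslog appender does not supply one, the formatter can be constructed directly; a hedged sketch follows (the 2048-byte limit and appender options are illustrative):

    require "syslog" # provides ::Syslog::LOG_USER, the default facility above

    formatter = SemanticLogger::Formatters::Syslog.new(max_size: 2048)

    SemanticLogger.add_appender(
      appender:  :syslog,
      url:       "udp://syslog.example.org:514",
      formatter: formatter
    )
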