semantic_logger 4.1.1 → 4.2.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (70) hide show
  1. checksums.yaml +4 -4
  2. data/lib/semantic_logger.rb +6 -13
  3. data/lib/semantic_logger/ansi_colors.rb +10 -10
  4. data/lib/semantic_logger/appender.rb +42 -26
  5. data/lib/semantic_logger/appender/async.rb +179 -0
  6. data/lib/semantic_logger/appender/async_batch.rb +95 -0
  7. data/lib/semantic_logger/appender/bugsnag.rb +2 -1
  8. data/lib/semantic_logger/appender/elasticsearch.rb +113 -81
  9. data/lib/semantic_logger/appender/elasticsearch_http.rb +1 -3
  10. data/lib/semantic_logger/appender/file.rb +1 -3
  11. data/lib/semantic_logger/appender/graylog.rb +6 -5
  12. data/lib/semantic_logger/appender/honeybadger.rb +0 -2
  13. data/lib/semantic_logger/appender/http.rb +25 -10
  14. data/lib/semantic_logger/appender/kafka.rb +1 -3
  15. data/lib/semantic_logger/appender/mongodb.rb +1 -3
  16. data/lib/semantic_logger/appender/new_relic.rb +7 -3
  17. data/lib/semantic_logger/appender/sentry.rb +6 -7
  18. data/lib/semantic_logger/appender/splunk.rb +1 -2
  19. data/lib/semantic_logger/appender/splunk_http.rb +3 -4
  20. data/lib/semantic_logger/appender/syslog.rb +1 -3
  21. data/lib/semantic_logger/appender/tcp.rb +7 -9
  22. data/lib/semantic_logger/appender/udp.rb +0 -2
  23. data/lib/semantic_logger/appender/wrapper.rb +0 -2
  24. data/lib/semantic_logger/base.rb +76 -19
  25. data/lib/semantic_logger/formatters.rb +37 -0
  26. data/lib/semantic_logger/formatters/base.rb +10 -3
  27. data/lib/semantic_logger/formatters/json.rb +2 -6
  28. data/lib/semantic_logger/formatters/one_line.rb +18 -0
  29. data/lib/semantic_logger/formatters/raw.rb +8 -2
  30. data/lib/semantic_logger/formatters/signalfx.rb +169 -0
  31. data/lib/semantic_logger/log.rb +23 -14
  32. data/lib/semantic_logger/loggable.rb +88 -15
  33. data/lib/semantic_logger/logger.rb +0 -20
  34. data/lib/semantic_logger/metric/new_relic.rb +75 -0
  35. data/lib/semantic_logger/metric/signalfx.rb +123 -0
  36. data/lib/semantic_logger/{metrics → metric}/statsd.rb +20 -8
  37. data/lib/semantic_logger/processor.rb +67 -169
  38. data/lib/semantic_logger/semantic_logger.rb +7 -31
  39. data/lib/semantic_logger/subscriber.rb +32 -36
  40. data/lib/semantic_logger/utils.rb +47 -0
  41. data/lib/semantic_logger/version.rb +1 -1
  42. data/test/appender/async_batch_test.rb +61 -0
  43. data/test/appender/async_test.rb +45 -0
  44. data/test/appender/elasticsearch_http_test.rb +3 -3
  45. data/test/appender/elasticsearch_test.rb +211 -49
  46. data/test/appender/file_test.rb +9 -8
  47. data/test/appender/mongodb_test.rb +3 -3
  48. data/test/appender/newrelic_rpm.rb +6 -0
  49. data/test/appender/sentry_test.rb +3 -1
  50. data/test/appender/wrapper_test.rb +29 -0
  51. data/test/concerns/compatibility_test.rb +64 -60
  52. data/test/debug_as_trace_logger_test.rb +62 -77
  53. data/test/formatters/one_line_test.rb +61 -0
  54. data/test/formatters/signalfx_test.rb +200 -0
  55. data/test/formatters_test.rb +36 -0
  56. data/test/in_memory_appender.rb +9 -0
  57. data/test/in_memory_appender_helper.rb +43 -0
  58. data/test/in_memory_batch_appender.rb +9 -0
  59. data/test/in_memory_metrics_appender.rb +14 -0
  60. data/test/loggable_test.rb +15 -30
  61. data/test/logger_test.rb +181 -135
  62. data/test/measure_test.rb +212 -113
  63. data/test/metric/new_relic_test.rb +36 -0
  64. data/test/metric/signalfx_test.rb +78 -0
  65. data/test/semantic_logger_test.rb +58 -65
  66. data/test/test_helper.rb +19 -2
  67. metadata +33 -7
  68. data/lib/semantic_logger/metrics/new_relic.rb +0 -30
  69. data/lib/semantic_logger/metrics/udp.rb +0 -80
  70. data/test/mock_logger.rb +0 -29
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 4cf47326afecad1dcc0ecd8cffa732b51c023356
4
- data.tar.gz: d8d72838b88abb13b94e1433d1676fff824eda26
3
+ metadata.gz: b67fae09283837194e489b469dace9f3b5103901
4
+ data.tar.gz: fc112ededd82585ff40307585e65769ab92601ce
5
5
  SHA512:
6
- metadata.gz: fbefeb4f49ea32b347f09997baf3c9ca7138177c1ae9ea307088ca900cc74ad0fd8d1af8fba0479fcfbd16c37ec9df24aeb1e0fbe988eef90b3fce0e92933652
7
- data.tar.gz: 0e168803566680fa7b20843df409e55bc4767b90a23c89249a6d7d7237f1b683d135088fc250a192d37726930ebee972844cb5b002e39c68b244eec8ad271d25
6
+ metadata.gz: a548305f79d04f28a1c2989132eb148f29da9415a5a3bc06b7879d7d95cabe94627533ad56c50a925137f3dfeca1dec62c43707472ade54eb331fd9f764f91a4
7
+ data.tar.gz: 7bf65721bfb19e865601ef929774877ab514607d86f150e86bb59be969cbecf080d2330e83c8ae2ba4aa2c44ea9b428773e5a791e281a142aeb20b4028036492
@@ -8,29 +8,22 @@ module SemanticLogger
8
8
  autoload :Appender, 'semantic_logger/appender'
9
9
  autoload :Base, 'semantic_logger/base'
10
10
  autoload :DebugAsTraceLogger, 'semantic_logger/debug_as_trace_logger'
11
+ autoload :Formatters, 'semantic_logger/formatters'
11
12
  autoload :Log, 'semantic_logger/log'
12
13
  autoload :Logger, 'semantic_logger/logger'
13
14
  autoload :Loggable, 'semantic_logger/loggable'
14
15
  autoload :Processor, 'semantic_logger/processor'
15
16
  autoload :Subscriber, 'semantic_logger/subscriber'
17
+ autoload :Utils, 'semantic_logger/utils'
16
18
 
17
19
  module Concerns
18
20
  autoload :Compatibility, 'semantic_logger/concerns/compatibility'
19
21
  end
20
22
 
21
- module Formatters
22
- autoload :Base, 'semantic_logger/formatters/base'
23
- autoload :Color, 'semantic_logger/formatters/color'
24
- autoload :Default, 'semantic_logger/formatters/default'
25
- autoload :Json, 'semantic_logger/formatters/json'
26
- autoload :Raw, 'semantic_logger/formatters/raw'
27
- autoload :Syslog, 'semantic_logger/formatters/syslog'
28
- end
29
-
30
- module Metrics
31
- autoload :NewRelic, 'semantic_logger/metrics/new_relic'
32
- autoload :Statsd, 'semantic_logger/metrics/statsd'
33
- autoload :Udp, 'semantic_logger/metrics/udp'
23
+ module Metric
24
+ autoload :NewRelic, 'semantic_logger/metric/new_relic'
25
+ autoload :Signalfx, 'semantic_logger/metric/signalfx'
26
+ autoload :Statsd, 'semantic_logger/metric/statsd'
34
27
  end
35
28
 
36
29
  if defined?(JRuby)
@@ -1,16 +1,16 @@
1
1
  module SemanticLogger
2
2
  # Formatting & colors used by optional color formatter
3
3
  module AnsiColors
4
- CLEAR = "\e[0m"
5
- BOLD = "\e[1m"
6
- BLACK = "\e[30m"
7
- RED = "\e[31m"
8
- GREEN = "\e[32m"
9
- YELLOW = "\e[33m"
10
- BLUE = "\e[34m"
11
- MAGENTA = "\e[35m"
12
- CYAN = "\e[36m"
13
- WHITE = "\e[37m"
4
+ CLEAR = "\e[0m"
5
+ BOLD = "\e[1m"
6
+ BLACK = "\e[30m"
7
+ RED = "\e[31m"
8
+ GREEN = "\e[32m"
9
+ YELLOW = "\e[33m"
10
+ BLUE = "\e[34m"
11
+ MAGENTA = "\e[35m"
12
+ CYAN = "\e[36m"
13
+ WHITE = "\e[37m"
14
14
 
15
15
  # DEPRECATED - NOT USED
16
16
  LEVEL_MAP = {
@@ -1,6 +1,8 @@
1
1
  module SemanticLogger
2
2
  module Appender
3
3
  # @formatter:off
4
+ autoload :Async, 'semantic_logger/appender/async'
5
+ autoload :AsyncBatch, 'semantic_logger/appender/async_batch'
4
6
  autoload :Bugsnag, 'semantic_logger/appender/bugsnag'
5
7
  autoload :Elasticsearch, 'semantic_logger/appender/elasticsearch'
6
8
  autoload :ElasticsearchHttp, 'semantic_logger/appender/elasticsearch_http'
@@ -34,45 +36,59 @@ module SemanticLogger
34
36
  end
35
37
 
36
38
  # Returns [SemanticLogger::Subscriber] appender for the supplied options
37
- def self.create(options, &block)
39
+ def self.factory(options, &block)
40
+ options = options.dup
41
+ async = options.delete(:async)
42
+ batch = options.delete(:batch)
43
+
44
+ # Extract batch and async options
45
+ proxy_options = {}
46
+ ASYNC_OPTION_KEYS.each { |key| proxy_options[key] = options.delete(key) if options.key?(key) }
47
+
48
+ appender = build(options, &block)
49
+
50
+ # If appender implements #batch, then it should use the batch proxy by default.
51
+ batch = true if batch.nil? && appender.respond_to?(:batch)
52
+
53
+ if batch == true
54
+ proxy_options[:appender] = appender
55
+ Appender::AsyncBatch.new(proxy_options)
56
+ elsif async == true
57
+ proxy_options[:appender] = appender
58
+ Appender::Async.new(proxy_options)
59
+ else
60
+ appender
61
+ end
62
+ end
63
+
64
+ private
65
+
66
+ ASYNC_OPTION_KEYS = [:max_queue_size, :lag_threshold_s, :batch_size, :batch_seconds, :lag_check_interval]
67
+
68
+ # Returns [Subscriber] instance from the supplied options.
69
+ def self.build(options, &block)
38
70
  if options[:io] || options[:file_name]
39
71
  SemanticLogger::Appender::File.new(options, &block)
40
72
  elsif appender = options.delete(:appender)
41
73
  if appender.is_a?(Symbol)
42
- constantize_symbol(appender).new(options)
74
+ SemanticLogger::Utils.constantize_symbol(appender).new(options)
43
75
  elsif appender.is_a?(Subscriber)
44
76
  appender
45
77
  else
46
78
  raise(ArgumentError, "Parameter :appender must be either a Symbol or an object derived from SemanticLogger::Subscriber, not: #{appender.inspect}")
47
79
  end
48
- elsif options[:logger]
49
- SemanticLogger::Appender::Wrapper.new(options, &block)
50
- end
51
- end
52
-
53
- def self.constantize_symbol(symbol, namespace = 'SemanticLogger::Appender')
54
- klass = "#{namespace}::#{camelize(symbol.to_s)}"
55
- begin
56
- if RUBY_VERSION.to_i >= 2
57
- Object.const_get(klass)
80
+ elsif appender = options.delete(:metric)
81
+ if appender.is_a?(Symbol)
82
+ SemanticLogger::Utils.constantize_symbol(appender, 'SemanticLogger::Metric').new(options)
83
+ elsif appender.is_a?(Subscriber)
84
+ appender
58
85
  else
59
- klass.split('::').inject(Object) { |o, name| o.const_get(name) }
86
+ raise(ArgumentError, "Parameter :metric must be either a Symbol or an object derived from SemanticLogger::Subscriber, not: #{appender.inspect}")
60
87
  end
61
- rescue NameError
62
- raise(ArgumentError, "Could not convert symbol: #{symbol} to a class in: #{namespace}. Looking for: #{klass}")
88
+ elsif options[:logger]
89
+ SemanticLogger::Appender::Wrapper.new(options, &block)
63
90
  end
64
91
  end
65
92
 
66
- private
67
-
68
- # Borrow from Rails, when not running Rails
69
- def self.camelize(term)
70
- string = term.to_s
71
- string = string.sub(/^[a-z\d]*/) { |match| match.capitalize }
72
- string.gsub!(/(?:_|(\/))([a-z\d]*)/i) { "#{$1}#{$2.capitalize}" }
73
- string.gsub!('/'.freeze, '::'.freeze)
74
- string
75
- end
76
-
77
93
  end
78
94
  end
@@ -0,0 +1,179 @@
1
+ require 'forwardable'
2
+
3
+ module SemanticLogger
4
+ module Appender
5
+ # Allow any appender to run asynchronously in a separate thread.
6
+ class Async
7
+ extend Forwardable
8
+
9
+ attr_accessor :logger, :lag_check_interval, :lag_threshold_s
10
+ attr_reader :queue, :appender
11
+
12
+ # Forward methods that can be called directly
13
+ def_delegator :@appender, :name
14
+ def_delegator :@appender, :should_log?
15
+ def_delegator :@appender, :filter
16
+ def_delegator :@appender, :host
17
+ def_delegator :@appender, :application
18
+ def_delegator :@appender, :level
19
+ def_delegator :@appender, :level=
20
+ def_delegator :@appender, :logger
21
+
22
+ # Appender proxy to allow an existing appender to run asynchronously in a separate thread.
23
+ #
24
+ # Parameters:
25
+ # name: [String]
26
+ # Name to use for the log thread and the log name when logging any errors from this appender.
27
+ #
28
+ # max_queue_size: [Integer]
29
+ # The maximum number of log messages to hold on the queue before blocking attempts to add to the queue.
30
+ # -1: The queue size is uncapped and will never block no matter how long the queue is.
31
+ # Default: 10,000
32
+ #
33
+ # lag_threshold_s [Float]
34
+ # Log a warning when a log message has been on the queue for longer than this period in seconds.
35
+ # Default: 30
36
+ #
37
+ # lag_check_interval: [Integer]
38
+ # Number of messages to process before checking for slow logging.
39
+ # Default: 1,000
40
+ def initialize(appender:,
41
+ name: appender.class.name,
42
+ max_queue_size: 10_000,
43
+ lag_check_interval: 1_000,
44
+ lag_threshold_s: 30)
45
+
46
+ @appender = appender
47
+ @lag_check_interval = lag_check_interval
48
+ @lag_threshold_s = lag_threshold_s
49
+
50
+ if max_queue_size == -1
51
+ @queue = Queue.new
52
+ @capped = false
53
+ else
54
+ @queue = SizedQueue.new(max_queue_size)
55
+ @capped = true
56
+ end
57
+ thread
58
+ end
59
+
60
+ # Returns [true|false] if the queue has a capped size.
61
+ def capped?
62
+ @capped
63
+ end
64
+
65
+ # Returns [Thread] the worker thread.
66
+ #
67
+ # Starts the worker thread if not running.
68
+ def thread
69
+ return @thread if @thread && @thread.alive?
70
+ @thread = Thread.new { process }
71
+ end
72
+
73
+ # Returns true if the worker thread is active
74
+ def active?
75
+ @thread && @thread.alive?
76
+ end
77
+
78
+ # Add log message for processing.
79
+ def log(log)
80
+ queue << log
81
+ end
82
+
83
+ # Flush all queued log entries to disk, database, etc.
84
+ # All queued log messages are written and then each appender is flushed in turn.
85
+ def flush
86
+ submit_request(:flush)
87
+ end
88
+
89
+ # Close all appenders and flush any outstanding messages.
90
+ def close
91
+ # TODO: Prevent new close requests once this appender has been closed.
92
+ submit_request(:close)
93
+ end
94
+
95
+ private
96
+
97
+ # Separate thread for batching up log messages before writing.
98
+ def process
99
+ # This thread is designed to never go down unless the main thread terminates
100
+ # or the appender is closed.
101
+ Thread.current.name = logger.name
102
+ logger.trace "Async: Appender thread active"
103
+ begin
104
+ process_messages
105
+ rescue StandardError => exception
106
+ # This block may be called after the file handles have been released by Ruby
107
+ logger.error('Async: Restarting due to exception', exception) rescue nil
108
+ retry
109
+ rescue Exception => exception
110
+ # This block may be called after the file handles have been released by Ruby
111
+ logger.error('Async: Stopping due to fatal exception', exception) rescue nil
112
+ ensure
113
+ @thread = nil
114
+ # This block may be called after the file handles have been released by Ruby
115
+ logger.trace('Async: Thread has stopped') rescue nil
116
+ end
117
+ end
118
+
119
+ def process_messages
120
+ count = 0
121
+ while message = queue.pop
122
+ if message.is_a?(Log)
123
+ appender.log(message)
124
+ count += 1
125
+ # Check every few log messages whether this appender thread is falling behind
126
+ if count > lag_check_interval
127
+ check_lag(message)
128
+ count = 0
129
+ end
130
+ else
131
+ break unless process_message(message)
132
+ end
133
+ end
134
+ end
135
+
136
+ # Returns false when message processing should be stopped
137
+ def process_message(message)
138
+ case message[:command]
139
+ when :flush
140
+ appender.flush
141
+ message[:reply_queue] << true if message[:reply_queue]
142
+ when :close
143
+ appender.close
144
+ message[:reply_queue] << true if message[:reply_queue]
145
+ return false
146
+ else
147
+ logger.warn "Async: Appender thread: Ignoring unknown command: #{message[:command]}"
148
+ end
149
+ true
150
+ end
151
+
152
+ def check_lag(log)
153
+ if (diff = Time.now - log.time) > lag_threshold_s
154
+ logger.warn "Async: Appender thread has fallen behind by #{diff} seconds with #{queue.size} messages queued up. Consider reducing the log level or changing the appenders"
155
+ end
156
+ end
157
+
158
+ # Submit command and wait for reply
159
+ def submit_request(command)
160
+ return false unless active?
161
+
162
+ queue_size = queue.size
163
+ msg = "Async: Queued log messages: #{queue_size}, running command: #{command}"
164
+ if queue_size > 1_000
165
+ logger.warn msg
166
+ elsif queue_size > 100
167
+ logger.info msg
168
+ elsif queue_size > 0
169
+ logger.trace msg
170
+ end
171
+
172
+ reply_queue = Queue.new
173
+ queue << {command: command, reply_queue: reply_queue}
174
+ reply_queue.pop
175
+ end
176
+
177
+ end
178
+ end
179
+ end
@@ -0,0 +1,95 @@
1
+ require 'concurrent'
2
+
3
+ module SemanticLogger
4
+ module Appender
5
+ # Log asynchronously in batches using a separate thread.
6
+ #
7
+ # Log messages are grouped up and only logged when:
8
+ # * The number of queued messages is exceeded.
9
+ # * Or, the appropriate amount of time has passed since the last batch was sent.
10
+ class AsyncBatch < Async
11
+ attr_accessor :batch_size, :batch_seconds
12
+ attr_reader :signal
13
+
14
+ # Batching Appender proxy for appenders that support batches.
15
+ #
16
+ # Parameters:
17
+ # batch_size: [Integer]
18
+ # Maximum number of messages to batch up before sending.
19
+ # Default: 300
20
+ #
21
+ # batch_seconds: [Integer]
22
+ # Maximum number of seconds between sending batches.
23
+ # Default: 5
24
+ #
25
+ # See SemanticLogger::Appender::Async for other parameters
26
+ #
27
+ # Note:
28
+ # * `lag_check_interval` is not applicable to batches, since the first message of every batch
29
+ # is the oldest and is always checked to see if the lag interval has been exceeded.
30
+ def initialize(appender:,
31
+ name: appender.class.name,
32
+ max_queue_size: 10_000,
33
+ lag_threshold_s: 30,
34
+ batch_size: 300,
35
+ batch_seconds: 5)
36
+
37
+ @batch_size = batch_size
38
+ @batch_seconds = batch_seconds
39
+ @signal = Concurrent::Event.new
40
+ super(
41
+ appender: appender,
42
+ name: name,
43
+ max_queue_size: max_queue_size,
44
+ lag_threshold_s: lag_threshold_s
45
+ )
46
+
47
+ raise(ArgumentError, "#{appender.class.name} does not support batching. It must implement #batch") unless appender.respond_to?(:batch)
48
+ end
49
+
50
+ # Add log message for processing.
51
+ def log(log)
52
+ result = super(log)
53
+ # Wake up the processing thread since the number of queued messages has been exceeded.
54
+ signal.set if queue.size >= batch_size
55
+ result
56
+ end
57
+
58
+ private
59
+
60
+ # Separate thread for batching up log messages before writing.
61
+ def process_messages
62
+ loop do
63
+ # Wait for batch interval or number of messages to be exceeded.
64
+ signal.wait(batch_seconds)
65
+
66
+ logs = []
67
+ first = true
68
+ message_count = queue.length
69
+ message_count.times do
70
+ # Queue#pop(true) raises an exception when there are no more messages, which is considered expensive.
71
+ message = queue.pop
72
+ if message.is_a?(Log)
73
+ logs << message
74
+ if first
75
+ check_lag(message)
76
+ first = false
77
+ end
78
+ else
79
+ process_message(message)
80
+ end
81
+ end
82
+ appender.batch(logs) if logs.size > 0
83
+ signal.reset unless queue.size >= batch_size
84
+ end
85
+ end
86
+
87
+ def submit_request(command)
88
+ # Wake up the processing thread to process this command immediately.
89
+ signal.set
90
+ super
91
+ end
92
+
93
+ end
94
+ end
95
+ end
@@ -48,13 +48,14 @@ class SemanticLogger::Appender::Bugsnag < SemanticLogger::Subscriber
48
48
 
49
49
  # Send an error notification to Bugsnag
50
50
  def log(log)
51
- return false unless should_log?(log)
52
51
  # Ignore logs coming from Bugsnag itself
53
52
  return false if log.name == 'Bugsnag'
54
53
 
55
54
  # Send error messages as Runtime exceptions
56
55
  exception =
57
56
  if log.exception
57
+ # Manually constructed Exception, without a backtrace.
58
+ log.exception.set_backtrace(log.backtrace) if !log.exception.backtrace && log.backtrace
58
59
  log.exception
59
60
  else
60
61
  error = RuntimeError.new(log.message)