semantic_logger 4.0.0 → 4.1.0

Files changed (53)
  1. checksums.yaml +4 -4
  2. data/README.md +55 -8
  3. data/lib/semantic_logger.rb +1 -2
  4. data/lib/semantic_logger/ansi_colors.rb +1 -2
  5. data/lib/semantic_logger/appender.rb +17 -15
  6. data/lib/semantic_logger/appender/bugsnag.rb +5 -4
  7. data/lib/semantic_logger/appender/elasticsearch.rb +102 -16
  8. data/lib/semantic_logger/appender/elasticsearch_http.rb +76 -0
  9. data/lib/semantic_logger/appender/file.rb +9 -25
  10. data/lib/semantic_logger/appender/graylog.rb +43 -38
  11. data/lib/semantic_logger/appender/honeybadger.rb +3 -5
  12. data/lib/semantic_logger/appender/http.rb +12 -15
  13. data/lib/semantic_logger/appender/kafka.rb +183 -0
  14. data/lib/semantic_logger/appender/mongodb.rb +3 -3
  15. data/lib/semantic_logger/appender/new_relic.rb +3 -7
  16. data/lib/semantic_logger/appender/sentry.rb +2 -5
  17. data/lib/semantic_logger/appender/splunk.rb +7 -10
  18. data/lib/semantic_logger/appender/splunk_http.rb +16 -16
  19. data/lib/semantic_logger/appender/syslog.rb +43 -122
  20. data/lib/semantic_logger/appender/tcp.rb +28 -9
  21. data/lib/semantic_logger/appender/udp.rb +4 -7
  22. data/lib/semantic_logger/appender/wrapper.rb +3 -7
  23. data/lib/semantic_logger/base.rb +47 -7
  24. data/lib/semantic_logger/formatters/base.rb +29 -10
  25. data/lib/semantic_logger/formatters/color.rb +75 -45
  26. data/lib/semantic_logger/formatters/default.rb +53 -28
  27. data/lib/semantic_logger/formatters/json.rb +7 -8
  28. data/lib/semantic_logger/formatters/raw.rb +97 -1
  29. data/lib/semantic_logger/formatters/syslog.rb +46 -80
  30. data/lib/semantic_logger/formatters/syslog_cee.rb +57 -0
  31. data/lib/semantic_logger/log.rb +17 -67
  32. data/lib/semantic_logger/logger.rb +17 -27
  33. data/lib/semantic_logger/processor.rb +70 -46
  34. data/lib/semantic_logger/semantic_logger.rb +130 -69
  35. data/lib/semantic_logger/subscriber.rb +18 -32
  36. data/lib/semantic_logger/version.rb +1 -1
  37. data/test/appender/elasticsearch_http_test.rb +75 -0
  38. data/test/appender/elasticsearch_test.rb +34 -27
  39. data/test/appender/file_test.rb +2 -2
  40. data/test/appender/honeybadger_test.rb +1 -1
  41. data/test/appender/kafka_test.rb +36 -0
  42. data/test/appender/new_relic_test.rb +1 -1
  43. data/test/appender/sentry_test.rb +1 -1
  44. data/test/appender/syslog_test.rb +2 -2
  45. data/test/appender/wrapper_test.rb +1 -1
  46. data/test/formatters/color_test.rb +154 -0
  47. data/test/formatters/default_test.rb +176 -0
  48. data/test/loggable_test.rb +1 -1
  49. data/test/logger_test.rb +47 -4
  50. data/test/measure_test.rb +2 -2
  51. data/test/semantic_logger_test.rb +34 -6
  52. data/test/test_helper.rb +8 -0
  53. metadata +14 -3
data/lib/semantic_logger/appender/file.rb

@@ -9,9 +9,11 @@ module SemanticLogger
   # Create a File Logger appender instance.
   #
   # Parameters
-  #   :file_name [String|IO]
+  #   :file_name [String]
   #     Name of file to write to.
-  #     Or, an IO stream to which to write the log message to.
+  #   Or,
+  #   :io [IO]
+  #     An IO stream to which to write the log messages to.
   #
   #   :level [:trace | :debug | :info | :warn | :error | :fatal]
   #     Override the log level for this appender.
@@ -58,34 +60,16 @@ module SemanticLogger
   #
   #   logger = SemanticLogger['test']
   #   logger.info 'Hello World'
-  def initialize(options={}, deprecated_level = nil, deprecated_filter = nil, &block)
-    # Old style arguments: (file_name, level=nil, filter=nil, &block)
-    options =
-      if options.is_a?(Hash)
-        options.dup
-      else
-        file_name = options
-        opts = {}
-        if file_name.respond_to?(:write) && file_name.respond_to?(:close)
-          opts[:io] = file_name
-        else
-          opts[:file_name] = file_name
-        end
-        opts[:level] = deprecated_level if deprecated_level
-        opts[:filter] = deprecated_filter if deprecated_filter
-        opts
-      end
-
-    if io = options.delete(:io)
+  def initialize(io: nil, file_name: nil, level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)
+    if io
       @log = io
     else
-      @file_name = options.delete(:file_name)
-      raise 'SemanticLogging::Appender::File missing mandatory parameter :file_name or :io' unless @file_name
+      @file_name = file_name
+      raise 'SemanticLogging::Appender::File missing mandatory parameter :file_name or :io' unless file_name
       reopen
     end

-    # Set the log level and formatter if supplied
-    super(options, &block)
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
  end

  # After forking an active process call #reopen to re-open
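With the old positional form gone, the File appender is configured purely by keyword arguments. A minimal usage sketch based on the signature above (the log file name is illustrative):

    # Log to a named file, or to an already-open IO stream such as STDOUT:
    SemanticLogger.add_appender(file_name: 'development.log', level: :debug)
    SemanticLogger.add_appender(io: STDOUT, formatter: :color)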
data/lib/semantic_logger/appender/graylog.rb

@@ -20,16 +20,25 @@ end
 #   `duration`, `level`, `message`, `metric`, `name`, `tags`
 class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
   # Map Semantic Logger levels to Graylog levels
-  LEVEL_MAP = {
-    fatal: GELF::FATAL,
-    error: GELF::ERROR,
-    warn: GELF::WARN,
-    info: GELF::INFO,
-    debug: GELF::DEBUG,
-    trace: GELF::DEBUG
-  }
+  class LevelMap
+    attr_accessor :trace, :debug, :info, :warn, :error, :fatal

-  attr_reader :notifier
+    def initialize(trace: GELF::DEBUG, debug: GELF::DEBUG, info: GELF::INFO, warn: GELF::WARN, error: GELF::ERROR, fatal: GELF::FATAL)
+      @trace = trace
+      @debug = debug
+      @info = info
+      @warn = warn
+      @error = error
+      @fatal = fatal
+    end
+
+    def [](level)
+      public_send(level)
+    end
+  end
+
+  attr_accessor :url, :max_size, :gelf_options, :level_map
+  attr_reader :notifier, :server, :port, :protocol

  # Create Graylog log appender.
  #
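A quick sketch of the new LevelMap semantics: since #[] above simply delegates to the per-level accessors, lookups read exactly like the old LEVEL_MAP hash while individual levels stay overridable:

    level_map = SemanticLogger::Appender::Graylog::LevelMap.new(trace: GELF::UNKNOWN)
    level_map[:trace]  # => GELF::UNKNOWN (overridden)
    level_map[:error]  # => GELF::ERROR (the default mapping)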
@@ -46,6 +55,9 @@ class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
   #   Max udp packet size. Ignored when protocol is :tcp
   #   Default: "WAN"
   #
+  # gelf_options: [Hash]
+  #   Custom gelf options. See Graylog documentation.
+  #
   # level: [:trace | :debug | :info | :warn | :error | :fatal]
   #   Override the log level for this appender.
   #   Default: SemanticLogger.default_level
@@ -68,46 +80,44 @@ class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
   # application: [String]
   #   Name of this application to appear in log messages.
   #   Default: SemanticLogger.application
-  def initialize(options = {}, &block)
-    options = options.dup
-    @url = options.delete(:url) || 'udp://localhost:12201'
-    @max_size = options.delete(:max_size) || 'WAN'
-
-    uri = URI.parse(@url)
-    @server = uri.host
-    @port = uri.port
-    protocol = uri.scheme.to_sym
-
-    raise(ArgumentError, "Invalid protocol value: #{protocol}. Must be :udp or :tcp") unless [:udp, :tcp].include?(protocol)
+  def initialize(url: 'udp://localhost:12201', max_size: 'WAN', gelf_options: {}, level_map: LevelMap.new,
+                 level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)

-    options[:protocol] = protocol == :tcp ? GELF::Protocol::TCP : GELF::Protocol::UDP
+    @url = url
+    @max_size = max_size
+    @gelf_options = gelf_options
+    @level_map = level_map.is_a?(LevelMap) ? level_map : LevelMap.new(level_map)

-    @gelf_options = options
-    options = extract_subscriber_options!(options)
-
-    super(options, &block)
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
     reopen
   end

   # Re-open after process fork
   def reopen
-    @gelf_options[:facility] = application
-    @notifier = GELF::Notifier.new(@server, @port, @max_size, @gelf_options)
+    uri = URI.parse(@url)
+    @server = uri.host
+    @port = uri.port
+    @protocol = uri.scheme.to_sym
+
+    raise(ArgumentError, "Invalid protocol value: #{@protocol}. Must be :udp or :tcp") unless [:udp, :tcp].include?(@protocol)
+
+    gelf_options[:protocol] ||= (@protocol == :tcp ? GELF::Protocol::TCP : GELF::Protocol::UDP)
+    gelf_options[:facility] ||= application
+
+    @notifier = GELF::Notifier.new(server, port, max_size, gelf_options)
     @notifier.collect_file_and_line = false
   end

   # Returns [Hash] of parameters to send
   def call(log, logger)
-    h = log.to_h(host, application)
+    h = SemanticLogger::Formatters::Raw.new.call(log, logger)
     h.delete(:time)
-    h.delete(:message) if log.message

-    short_message = log.message || log.exception.message
+    h[:short_message] = h.delete(:message) || log.exception.message
     h[:timestamp] = log.time.utc.to_f
-    h[:level] = logger.map_level(log)
+    h[:level] = logger.level_map[log.level]
     h[:level_str] = log.level.to_s
     h[:duration_str] = h.delete(:duration)
-    h[:short_message] = short_message
     h
   end
@@ -115,13 +125,8 @@ class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
   def log(log)
     return false unless should_log?(log)

-    @notifier.notify!(formatter.call(log, self))
+    notifier.notify!(formatter.call(log, self))
     true
   end

-  # Returns the Graylog level for the supplied log message
-  def map_level(log)
-    LEVEL_MAP[log.level]
-  end
-
 end
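Putting the new Graylog options together, a construction sketch (the endpoint URL and facility name are illustrative; the option names come from the signature above, and a plain Hash passed as level_map: is converted to a LevelMap in initialize):

    SemanticLogger.add_appender(
      appender: :graylog,
      url: 'tcp://graylog.example.org:12201',  # hypothetical endpoint
      gelf_options: { facility: 'payments' },  # passed through to GELF::Notifier in #reopen
      level_map: { trace: GELF::INFO }         # wrapped in a LevelMap automatically
    )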
data/lib/semantic_logger/appender/honeybadger.rb

@@ -10,7 +10,7 @@ end
 #   SemanticLogger.add_appender(appender: :honeybadger)
 #
 class SemanticLogger::Appender::Honeybadger < SemanticLogger::Subscriber
-  # Create Appender
+  # Honeybadger Appender
   #
   # Parameters
   #   level: [:trace | :debug | :info | :warn | :error | :fatal]
@@ -35,10 +35,8 @@ class SemanticLogger::Appender::Honeybadger < SemanticLogger::Subscriber
   #   application: [String]
   #     Name of this application to appear in log messages.
   #     Default: SemanticLogger.application
-  def initialize(options = {}, &block)
-    options = options.is_a?(Hash) ? options.dup : {level: options}
-    options[:level] ||= :error
-    super(options, &block)
+  def initialize(level: :error, formatter: nil, filter: nil, application: nil, host: nil, &block)
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
   end

   # Send an error notification to honeybadger
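Since the level now defaults to :error in the signature itself, common setups reduce to a single line; a small sketch:

    # Send :error and above to Honeybadger (the new default):
    SemanticLogger.add_appender(appender: :honeybadger)

    # Or raise the threshold so only :fatal messages are sent:
    SemanticLogger.add_appender(appender: :honeybadger, level: :fatal)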
data/lib/semantic_logger/appender/http.rb

@@ -77,18 +77,17 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
   #
   #   continue_timeout: [Float]
   #     Default: 1.0
-  def initialize(options, &block)
-    options = options.dup
-    @url = options.delete(:url)
-    @ssl_options = options.delete(:ssl)
-    @username = options.delete(:username)
-    @password = options.delete(:password)
-    @compress = options.delete(:compress) || false
-    @open_timeout = options.delete(:open_timeout) || 2.0
-    @read_timeout = options.delete(:read_timeout) || 1.0
-    @continue_timeout = options.delete(:continue_timeout) || 1.0
-
-    raise(ArgumentError, 'Missing mandatory parameter :url') unless @url
+  def initialize(url:, compress: false, ssl: {}, username: nil, password: nil, open_timeout: 2.0, read_timeout: 1.0, continue_timeout: 1.0,
+                 level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)
+
+    @url = url
+    @ssl_options = ssl
+    @username = username
+    @password = password
+    @compress = compress
+    @open_timeout = open_timeout
+    @read_timeout = read_timeout
+    @continue_timeout = continue_timeout

     @header = {
       'Accept' => 'application/json',
@@ -111,7 +110,6 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
     @path = '/' if @path == ''

     if uri.scheme == 'https'
-      @ssl_options ||= {}
       @ssl_options[:use_ssl] = true
       @ssl_options[:verify_mode] ||= OpenSSL::SSL::VERIFY_PEER
       @port ||= HTTP.https_default_port
@@ -120,8 +118,7 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
     end
     @http = nil

-    # Pass on the level and custom formatter if supplied
-    super(options)
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
     reopen
   end

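A construction sketch for the keyword-argument HTTP appender; the endpoint URL is hypothetical, and note from the hunk above that an https:// scheme now sets ssl[:use_ssl] and a default verify_mode automatically:

    SemanticLogger.add_appender(
      appender: :http,
      url: 'https://logs.example.org/api/events',  # hypothetical endpoint
      compress: true,                              # gzip the message body
      username: 'logger',                          # optional basic-auth credentials
      password: ENV['LOG_HTTP_PASSWORD']
    )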
data/lib/semantic_logger/appender/kafka.rb (new file)

@@ -0,0 +1,183 @@
+begin
+  require 'kafka'
+rescue LoadError
+  raise 'Gem ruby-kafka is required for logging to Kafka. Please add the gem "ruby-kafka" to your Gemfile.'
+end
+
+require 'date'
+
+# Forward all log messages to Apache Kafka.
+#
+# Example:
+#
+#   SemanticLogger.add_appender(
+#     appender: :kafka,
+#
+#     # At least one of these nodes must be available:
+#     seed_brokers: ["kafka1:9092", "kafka2:9092"],
+#
+#     # Set an optional client id in order to identify the client to Kafka:
+#     client_id: "my-application",
+#   )
+class SemanticLogger::Appender::Kafka < SemanticLogger::Subscriber
+  attr_accessor :seed_brokers, :client_id, :connect_timeout, :socket_timeout,
+                :ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key,
+                :delivery_threshold, :delivery_interval,
+                :topic, :partition, :partition_key, :key
+
+  # Send log messages to Kafka in JSON format.
+  #
+  # Kafka Parameters:
+  #
+  #   seed_brokers: [Array<String>, String]
+  #     The list of brokers used to initialize the client. Either an Array of connections,
+  #     or a comma separated string of connections.
+  #     A connection can either be a string of "host:port" or a full URI with a scheme.
+  #     If there's a scheme it's ignored and only host/port are used.
+  #
+  #   client_id: [String]
+  #     The identifier for this application.
+  #     Default: semantic-logger
+  #
+  #   topic: [String]
+  #     Topic to publish log messages to.
+  #     Default: 'log_messages'
+  #
+  #   partition: [Integer]
+  #     The partition that the message should be written to.
+  #     Default: nil
+  #
+  #   partition_key: [String]
+  #     The key that should be used to assign a partition.
+  #     Default: nil
+  #
+  #   key: [String]
+  #     The message key.
+  #     Default: nil
+  #
+  #   connect_timeout: [Integer]
+  #     The timeout setting for connecting to brokers.
+  #     Default: nil
+  #
+  #   socket_timeout: [Integer]
+  #     The timeout setting for socket connections.
+  #     Default: nil
+  #
+  #   ssl_ca_cert: [String, Array<String>]
+  #     A PEM encoded CA cert, or an Array of PEM encoded CA certs, to use with a SSL connection.
+  #     Default: nil
+  #
+  #   ssl_client_cert: [String]
+  #     A PEM encoded client cert to use with a SSL connection.
+  #     Must be used in combination with ssl_client_cert_key.
+  #     Default: nil
+  #
+  #   ssl_client_cert_key [String]
+  #     A PEM encoded client cert key to use with a SSL connection.
+  #     Must be used in combination with ssl_client_cert.
+  #     Default: nil
+  #
+  #   delivery_threshold: [Integer]
+  #     Number of messages between triggering a delivery of messages to Apache Kafka.
+  #     Default: 100
+  #
+  #   delivery_interval: [Integer]
+  #     Number of seconds between triggering a delivery of messages to Apache Kafka.
+  #     Default: 10
+  #
+  # Semantic Logger Parameters:
+  #
+  #   level: [:trace | :debug | :info | :warn | :error | :fatal]
+  #     Override the log level for this appender.
+  #     Default: SemanticLogger.default_level
+  #
+  #   formatter: [Object|Proc|Symbol|Hash]
+  #     An instance of a class that implements #call, or a Proc to be used to format
+  #     the output from this appender
+  #     Default: :raw_json (See: #call)
+  #
+  #   filter: [Regexp|Proc]
+  #     RegExp: Only include log messages where the class name matches the supplied
+  #     regular expression. All other messages will be ignored.
+  #     Proc: Only include log messages where the supplied Proc returns true.
+  #     The Proc must return true or false.
+  #
+  #   host: [String]
+  #     Name of this host to appear in log messages.
+  #     Default: SemanticLogger.host
+  #
+  #   application: [String]
+  #     Name of this application to appear in log messages.
+  #     Default: SemanticLogger.application
+  def initialize(seed_brokers:, client_id: 'semantic-logger', connect_timeout: nil, socket_timeout: nil,
+                 ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
+                 topic: 'log_messages', partition: nil, partition_key: nil, key: nil,
+                 delivery_threshold: 100, delivery_interval: 10,
+                 level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)
+
+    @seed_brokers = seed_brokers
+    @client_id = client_id
+    @connect_timeout = connect_timeout
+    @socket_timeout = socket_timeout
+    @ssl_ca_cert = ssl_ca_cert
+    @ssl_client_cert = ssl_client_cert
+    @ssl_client_cert_key = ssl_client_cert_key
+    @topic = topic
+    @partition = partition
+    @partition_key = partition_key
+    @key = key
+    @delivery_threshold = delivery_threshold
+    @delivery_interval = delivery_interval
+
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
+    reopen
+  end
+
+  def reopen
+    @kafka = ::Kafka.new(
+      seed_brokers: seed_brokers,
+      client_id: client_id,
+      connect_timeout: connect_timeout,
+      socket_timeout: socket_timeout,
+      ssl_ca_cert: ssl_ca_cert,
+      ssl_client_cert: ssl_client_cert,
+      ssl_client_cert_key: ssl_client_cert_key,
+      logger: SemanticLogger::Processor.logger.clone
+    )
+
+    @producer = @kafka.async_producer(
+      delivery_threshold: delivery_threshold,
+      delivery_interval: delivery_interval
+    )
+  end
+
+  def close
+    @producer.shutdown if @producer
+    @producer = nil
+    @kafka.close if @kafka
+    @kafka = nil
+  end
+
+  # Forward log messages to Kafka producer thread.
+  def log(log)
+    return false unless should_log?(log)
+
+    json = formatter.call(log, self)
+    @producer.produce(json, topic: topic, partition: partition, partition_key: partition_key, key: key)
+  end
+
+  # Use JSON Formatter by default.
+  def default_formatter
+    SemanticLogger::Formatters::Json.new
+  end
+
+  # Restart producer thread since there is no other way to flush.
+  def flush
+    @producer.shutdown
+    @producer = @kafka.async_producer(
+      delivery_threshold: delivery_threshold,
+      delivery_interval: delivery_interval
+    )
+  end
+
+end
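Beyond the minimal example in the file header, a hedged sketch of routing logs to a dedicated topic (the broker address, topic, and partition key are illustrative):

    SemanticLogger.add_appender(
      appender: :kafka,
      seed_brokers: ['kafka1:9092'],
      topic: 'app_logs',            # defaults to 'log_messages'
      partition_key: 'my-service',  # keep one service's logs on one partition
      delivery_interval: 30         # deliver buffered messages at least every 30 seconds
    )

Note that the async producer buffers messages, so #flush restarts the producer thread: as the comment above says, there is no other way to force delivery.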
data/lib/semantic_logger/appender/mongodb.rb

@@ -104,7 +104,8 @@ module SemanticLogger
   #   Name of this application to appear in log messages.
   #   Default: SemanticLogger.application
   def initialize(uri:, collection_name: 'semantic_logger', write_concern: 0, collection_size: 1024**3, collection_max: nil,
-                 level: nil, formatter: nil, filter: nil, host: SemanticLogger.host, application: SemanticLogger.application, &block)
+                 level: nil, formatter: nil, filter: nil, host: nil, application: nil, &block)
+
     @client = Mongo::Client.new(uri, logger: SemanticLogger::Processor.logger.clone)
     @collection_name = collection_name
     @options = {
@@ -119,8 +120,7 @@ module SemanticLogger
     # Create the collection and necessary indexes
     create_indexes

-    # Set the log level and formatter
-    super(level: level, formatter: formatter, filter: filter, host: host, application: application, &block)
+    super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
   end

   # After forking an active process call #reopen to re-open
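The host:/application: defaults changing from SemanticLogger.host/SemanticLogger.application to nil presumably lets the Subscriber base class resolve them lazily rather than freezing them at construction time. A usage sketch (the connection URI is illustrative):

    SemanticLogger.add_appender(
      appender: :mongodb,
      uri: 'mongodb://127.0.0.1:27017/production',
      collection_size: 1024**3  # cap the capped collection at 1 GiB
    )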