semantic_logger 4.1.1 → 4.2.0
- checksums.yaml +4 -4
- data/lib/semantic_logger.rb +6 -13
- data/lib/semantic_logger/ansi_colors.rb +10 -10
- data/lib/semantic_logger/appender.rb +42 -26
- data/lib/semantic_logger/appender/async.rb +179 -0
- data/lib/semantic_logger/appender/async_batch.rb +95 -0
- data/lib/semantic_logger/appender/bugsnag.rb +2 -1
- data/lib/semantic_logger/appender/elasticsearch.rb +113 -81
- data/lib/semantic_logger/appender/elasticsearch_http.rb +1 -3
- data/lib/semantic_logger/appender/file.rb +1 -3
- data/lib/semantic_logger/appender/graylog.rb +6 -5
- data/lib/semantic_logger/appender/honeybadger.rb +0 -2
- data/lib/semantic_logger/appender/http.rb +25 -10
- data/lib/semantic_logger/appender/kafka.rb +1 -3
- data/lib/semantic_logger/appender/mongodb.rb +1 -3
- data/lib/semantic_logger/appender/new_relic.rb +7 -3
- data/lib/semantic_logger/appender/sentry.rb +6 -7
- data/lib/semantic_logger/appender/splunk.rb +1 -2
- data/lib/semantic_logger/appender/splunk_http.rb +3 -4
- data/lib/semantic_logger/appender/syslog.rb +1 -3
- data/lib/semantic_logger/appender/tcp.rb +7 -9
- data/lib/semantic_logger/appender/udp.rb +0 -2
- data/lib/semantic_logger/appender/wrapper.rb +0 -2
- data/lib/semantic_logger/base.rb +76 -19
- data/lib/semantic_logger/formatters.rb +37 -0
- data/lib/semantic_logger/formatters/base.rb +10 -3
- data/lib/semantic_logger/formatters/json.rb +2 -6
- data/lib/semantic_logger/formatters/one_line.rb +18 -0
- data/lib/semantic_logger/formatters/raw.rb +8 -2
- data/lib/semantic_logger/formatters/signalfx.rb +169 -0
- data/lib/semantic_logger/log.rb +23 -14
- data/lib/semantic_logger/loggable.rb +88 -15
- data/lib/semantic_logger/logger.rb +0 -20
- data/lib/semantic_logger/metric/new_relic.rb +75 -0
- data/lib/semantic_logger/metric/signalfx.rb +123 -0
- data/lib/semantic_logger/{metrics → metric}/statsd.rb +20 -8
- data/lib/semantic_logger/processor.rb +67 -169
- data/lib/semantic_logger/semantic_logger.rb +7 -31
- data/lib/semantic_logger/subscriber.rb +32 -36
- data/lib/semantic_logger/utils.rb +47 -0
- data/lib/semantic_logger/version.rb +1 -1
- data/test/appender/async_batch_test.rb +61 -0
- data/test/appender/async_test.rb +45 -0
- data/test/appender/elasticsearch_http_test.rb +3 -3
- data/test/appender/elasticsearch_test.rb +211 -49
- data/test/appender/file_test.rb +9 -8
- data/test/appender/mongodb_test.rb +3 -3
- data/test/appender/newrelic_rpm.rb +6 -0
- data/test/appender/sentry_test.rb +3 -1
- data/test/appender/wrapper_test.rb +29 -0
- data/test/concerns/compatibility_test.rb +64 -60
- data/test/debug_as_trace_logger_test.rb +62 -77
- data/test/formatters/one_line_test.rb +61 -0
- data/test/formatters/signalfx_test.rb +200 -0
- data/test/formatters_test.rb +36 -0
- data/test/in_memory_appender.rb +9 -0
- data/test/in_memory_appender_helper.rb +43 -0
- data/test/in_memory_batch_appender.rb +9 -0
- data/test/in_memory_metrics_appender.rb +14 -0
- data/test/loggable_test.rb +15 -30
- data/test/logger_test.rb +181 -135
- data/test/measure_test.rb +212 -113
- data/test/metric/new_relic_test.rb +36 -0
- data/test/metric/signalfx_test.rb +78 -0
- data/test/semantic_logger_test.rb +58 -65
- data/test/test_helper.rb +19 -2
- metadata +33 -7
- data/lib/semantic_logger/metrics/new_relic.rb +0 -30
- data/lib/semantic_logger/metrics/udp.rb +0 -80
- data/test/mock_logger.rb +0 -29

data/lib/semantic_logger/appender/elasticsearch.rb

@@ -15,15 +15,11 @@ require 'date'
 #     url: 'http://localhost:9200'
 #   )
 class SemanticLogger::Appender::Elasticsearch < SemanticLogger::Subscriber
-  attr_accessor :url, :index, :type, :client, :flush_interval, :timeout_interval, :batch_size
+  attr_accessor :url, :index, :type, :client, :flush_interval, :timeout_interval, :batch_size, :elasticsearch_args
 
   # Create Elasticsearch appender over persistent HTTP(S)
   #
   # Parameters:
-  #   url: [String]
-  #     Fully qualified address to the Elasticsearch service.
-  #     Default: 'http://localhost:9200'
-  #
   #   index: [String]
   #     Prefix of the index to store the logs in Elasticsearch.
   #     The final index appends the date so that indexes are used per day.

@@ -34,18 +30,6 @@ class SemanticLogger::Appender::Elasticsearch < SemanticLogger::Subscriber
   #     Document type to associate with logs when they are written.
   #     Default: 'log'
   #
-  #   batch_size: [Fixnum]
-  #     Size of list when sending to Elasticsearch. May be smaller if flush is triggered early.
-  #     Default: 500
-  #
-  #   flush_interval: [Fixnum]
-  #     Seconds to wait before attempting a flush to Elasticsearch. If no messages queued it's a NOOP.
-  #     Default: 1
-  #
-  #   timeout_interval: [Fixnum]
-  #     Seconds to allow the Elasticsearch client to flush the bulk message.
-  #     Default: 10
-  #
   #   level: [:trace | :debug | :info | :warn | :error | :fatal]
   #     Override the log level for this appender.
   #     Default: SemanticLogger.default_level

@@ -68,88 +52,136 @@ class SemanticLogger::Appender::Elasticsearch < SemanticLogger::Subscriber
   #   application: [String]
   #     Name of this application to appear in log messages.
   #     Default: SemanticLogger.application
-
-
-
-
-
-
-
-
-
-
-
-
+  #
+  # Elasticsearch Parameters:
+  #   url: [String]
+  #     Fully qualified address to the Elasticsearch service.
+  #     Default: 'http://localhost:9200'
+  #
+  #   hosts: [String|Hash|Array]
+  #     Single host passed as a String or Hash, or multiple hosts
+  #     passed as an Array; `host` or `url` keys are also valid.
+  #     Note:
+  #       :url above is ignored when supplying this option.
+  #
+  #   resurrect_after [Float]
+  #     After how many seconds a dead connection should be tried again.
+  #
+  #   reload_connections [true|false|Integer]
+  #     Reload connections after X requests.
+  #     Default: false
+  #
+  #   randomize_hosts [true|false]
+  #     Shuffle connections on initialization and reload.
+  #     Default: false
+  #
+  #   sniffer_timeout [Integer]
+  #     Timeout for reloading connections in seconds.
+  #     Default: 1
+  #
+  #   retry_on_failure [true|false|Integer]
+  #     Retry X times when the request fails before raising an exception.
+  #     Default: false
+  #
+  #   retry_on_status [Array<Number>]
+  #     Retry when specific status codes are returned.
+  #
+  #   reload_on_failure [true|false]
+  #     Reload connections after failure.
+  #     Default: false
+  #
+  #   request_timeout [Integer]
+  #     The request timeout to be passed to the transport in options.
+  #
+  #   adapter [Symbol]
+  #     A specific adapter for Faraday (e.g. `:patron`)
+  #
+  #   transport_options [Hash]
+  #     Options to be passed to the `Faraday::Connection` constructor.
+  #
+  #   transport_class [Constant]
+  #     A specific transport class to use, will be initialized by
+  #     the client and passed hosts and all arguments.
+  #
+  #   transport [Object]
+  #     A specific transport instance.
+  #
+  #   serializer_class [Constant]
+  #     A specific serializer class to use, will be initialized by
+  #     the transport and passed the transport instance.
+  #
+  #   selector [Elasticsearch::Transport::Transport::Connections::Selector::Base]
+  #     An instance of selector strategy derived from `Elasticsearch::Transport::Transport::Connections::Selector::Base`.
+  #
+  #   send_get_body_as [String]
+  #     Specify the HTTP method to use for GET requests with a body.
+  #     Default: 'GET'
+  def initialize(url: 'http://localhost:9200',
+                 index: 'semantic_logger',
+                 type: 'log',
+                 level: nil,
+                 formatter: nil,
+                 filter: nil,
+                 application: nil,
+                 host: nil,
+                 **elasticsearch_args,
+                 &block)
+
+    @url                         = url
+    @index                       = index
+    @type                        = type
+    @elasticsearch_args          = elasticsearch_args.dup
+    @elasticsearch_args[:url]    = url if url && !elasticsearch_args[:hosts]
+    @elasticsearch_args[:logger] = logger
 
     super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
     reopen
   end
 
   def reopen
-    @client = Elasticsearch::Client.new(
-
-    @messages_mutex.synchronize { @messages = [] }
-
-    @flush_task = Concurrent::TimerTask.new(execution_interval: flush_interval, timeout_interval: timeout_interval) do
-      flush
-    end.execute
-  end
-
-  def close
-    @flush_task.shutdown if @flush_task
-    @flush_task = nil
-    # No api to close connections in the elasticsearch client!
-    #@client.close if @client
-    #@client = nil
+    @client = Elasticsearch::Client.new(@elasticsearch_args)
   end
 
-
-
-
-
-
+  # Log to the index for today
+  def log(log)
+    bulk_payload = formatter.call(log, self)
+    write_to_elasticsearch([bulk_index(log), bulk_payload])
+    true
   end
 
-  def
-
-
-
-
-
+  def batch(logs)
+    messages = []
+    day      = nil
+    logs.each do |log|
+      # Only write the bulk index once per day per batch. Supports mixed dates in a batch.
+      if log.time.day != day
+        messages << bulk_index(log)
+        day = log.time.day
       end
+      messages << formatter.call(log, self)
     end
 
-
-
-    if bulk_result["errors"]
-      failed = bulk_result["items"].select { |x| x["status"] != 201 }
-      SemanticLogger::Processor.logger.error("ElasticSearch: Write failed. Messages discarded. : #{failed}")
-    end
-  end
-  rescue Exception => exc
-    SemanticLogger::Processor.logger.error('ElasticSearch: Failed to bulk insert log messages', exc)
+    write_to_elasticsearch(messages)
+    true
   end
 
-
-  def log(log)
-    return false unless should_log?(log)
-
-    daily_index = log.time.strftime("#{@index}-%Y.%m.%d")
+  private
 
-
-
-
-
+  def write_to_elasticsearch(messages)
+    bulk_result = @client.bulk(body: messages)
+    if bulk_result["errors"]
+      failed = bulk_result["items"].select { |x| x["status"] != 201 }
+      logger.error("ElasticSearch: Write failed. Messages discarded. : #{failed}")
+    end
   end
 
-  def
-
-
-
-    @messages.push(bulk_payload)
-    @messages.length
-  end
+  def bulk_index(log)
+    daily_index = log.time.strftime("#{index}-%Y.%m.%d")
+    {'index' => {'_index' => daily_index, '_type' => type}}
+  end
 
-
+  def default_formatter
+    SemanticLogger::Formatters::Raw.new(time_format: :iso_8601, time_key: :timestamp)
   end
+
 end
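
Taken together, the appender above no longer batches internally (batch_size, flush_interval, timeout_interval and the Concurrent::TimerTask are gone); it now exposes a plain log method plus a batch(logs) method, and sweeps any extra keyword arguments into **elasticsearch_args, which reopen hands straight to Elasticsearch::Client.new. Note also that the per-appender `return false unless should_log?(log)` guards disappear across every appender in this diff, suggesting the level/filter check moved into the shared appender plumbing. A minimal construction sketch based on the initializer shown above; the host names and retry settings are illustrative, and the instance form of SemanticLogger.add_appender is assumed:

  require 'semantic_logger'

  appender = SemanticLogger::Appender::Elasticsearch.new(
    index:            'my_app',                               # illustrative index prefix
    hosts:            ['http://es1:9200', 'http://es2:9200'], # illustrative hosts; overrides :url
    retry_on_failure: 5,                                      # swept into **elasticsearch_args
    request_timeout:  30                                      # likewise forwarded to Elasticsearch::Client.new
  )
  SemanticLogger.add_appender(appender: appender)

Because batch(logs) is now supported, a batching wrapper such as the new appender/async_batch.rb listed above can hand this appender many log events per bulk request; bulk_index keeps the bulk header to one per day within a batch.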

data/lib/semantic_logger/appender/elasticsearch_http.rb

@@ -54,7 +54,7 @@ class SemanticLogger::Appender::ElasticsearchHttp < SemanticLogger::Appender::Http
 
   @index = index
   @type  = type
-  super(url: url, compress: compress, ssl: ssl,
+  super(url: url, compress: compress, ssl: ssl, read_timeout: read_timeout, open_timeout: open_timeout, continue_timeout: continue_timeout,
        level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
 
  @request_path = "#{@path.end_with?('/') ? @path : "#{@path}/"}#{@index}-%Y.%m.%d"

@@ -63,8 +63,6 @@ class SemanticLogger::Appender::ElasticsearchHttp < SemanticLogger::Appender::Http
 
   # Log to the index for today.
   def log(log)
-    return false unless should_log?(log)
-
     post(formatter.call(log, self), log.time.strftime(@logging_path))
   end
 

data/lib/semantic_logger/appender/file.rb

@@ -81,7 +81,7 @@ module SemanticLogger
     def reopen
       return unless @file_name
 
-      @log
+      @log = open(@file_name, (::File::WRONLY | ::File::APPEND | ::File::CREAT))
       # Force all log entries to write immediately without buffering
       # Allows multiple processes to write to the same log file simultaneously
       @log.sync = true

@@ -93,8 +93,6 @@ module SemanticLogger
     # trace entries are mapped to debug since :trace is not supported by the
     # Ruby or Rails Loggers
     def log(log)
-      return false unless should_log?(log)
-
       # Since only one appender thread will be writing to the file at a time
       # it is not necessary to protect access to the file with a semaphore
       # Allow this logger to filter out log levels lower than its own

data/lib/semantic_logger/appender/graylog.rb

@@ -110,11 +110,9 @@ class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
 
   # Returns [Hash] of parameters to send
   def call(log, logger)
-    h =
-    h.delete(:time)
+    h = default_formatter.call(log, logger)
 
     h[:short_message] = h.delete(:message) || log.exception.message
-    h[:timestamp]     = log.time.utc.to_f
     h[:level]         = logger.level_map[log.level]
     h[:level_str]     = log.level.to_s
     h[:duration_str]  = h.delete(:duration)

@@ -123,10 +121,13 @@ class SemanticLogger::Appender::Graylog < SemanticLogger::Subscriber
 
   # Forward log messages
   def log(log)
-    return false unless should_log?(log)
-
     notifier.notify!(formatter.call(log, self))
     true
   end
 
+  private
+
+  def default_formatter
+    SemanticLogger::Formatters::Raw.new(time_format: :seconds, time_key: :timestamp)
+  end
 end
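
With the new default_formatter above, the GELF timestamp is no longer assigned by hand in call: the Raw formatter emits it under the :timestamp key as epoch seconds. Illustratively (log and appender are assumed to be in scope):

  formatter = SemanticLogger::Formatters::Raw.new(time_format: :seconds, time_key: :timestamp)
  h = formatter.call(log, appender) # Hash representation of the log event
  h[:timestamp]                     # => Float epoch seconds, e.g. 1504010672.293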

data/lib/semantic_logger/appender/honeybadger.rb

@@ -41,8 +41,6 @@ class SemanticLogger::Appender::Honeybadger < SemanticLogger::Subscriber
 
   # Send an error notification to honeybadger
   def log(log)
-    return false unless should_log?(log)
-
     context = formatter.call(log, self)
     if log.exception
       context.delete(:exception)

data/lib/semantic_logger/appender/http.rb

@@ -2,6 +2,7 @@ require 'net/http'
 require 'uri'
 require 'socket'
 require 'json'
+require 'openssl'
 
 # Log to any HTTP(S) server that accepts log messages in JSON form
 #

@@ -16,7 +17,7 @@ require 'json'
 #     url: 'http://localhost:8088/path'
 #   )
 class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
-  attr_accessor :username, :
+  attr_accessor :username, :compress, :header,
                 :open_timeout, :read_timeout, :continue_timeout
   attr_reader :http, :url, :server, :port, :path, :ssl_options
 

@@ -77,8 +78,20 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
   #
   # continue_timeout: [Float]
   #   Default: 1.0
-  def initialize(url:,
-
+  def initialize(url:,
+                 compress: false,
+                 ssl: {},
+                 username: nil,
+                 password: nil,
+                 open_timeout: 2.0,
+                 read_timeout: 1.0,
+                 continue_timeout: 1.0,
+                 level: nil,
+                 formatter: nil,
+                 filter: nil,
+                 application: nil,
+                 host: nil,
+                 &block)
 
     @url         = url
     @ssl_options = ssl
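
The single-line signature is replaced by explicit keyword arguments with visible defaults. A construction sketch using the defaults shown above (the endpoint URL is illustrative):

  appender = SemanticLogger::Appender::Http.new(
    url:          'http://localhost:8088/path', # illustrative endpoint
    compress:     true,                         # gzip the JSON body before posting
    open_timeout: 2.0,                          # matches the defaults in the signature above
    read_timeout: 1.0
  )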

@@ -89,10 +102,10 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
     @read_timeout     = read_timeout
     @continue_timeout = continue_timeout
 
+    # On Ruby v2.0 and greater, Net::HTTP.new already uses a persistent connection if the server allows it
     @header = {
       'Accept'       => 'application/json',
       'Content-Type' => 'application/json',
-      # On Ruby v2.0 and greater, Net::HTTP.new already uses a persistent connection if the server allows it
       'Connection'   => 'keep-alive',
       'Keep-Alive'   => '300'
     }

@@ -148,9 +161,9 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
 
   # Forward log messages to HTTP Server
   def log(log)
-
-
-    post(
+    message = formatter.call(log, self)
+    logger.trace(message)
+    post(message)
   end
 
   private

@@ -189,8 +202,7 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
   # Process HTTP Request
   def process_request(request, body = nil)
     if body
-      body
-      request.body = body
+      request.body = compress ? compress_data(body) : body
     end
     request.basic_auth(@username, @password) if @username
     response = @http.request(request)
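
compress_data itself is not part of this excerpt. A plausible sketch, assuming it simply gzips the request body with Ruby's standard Zlib (this is an assumption, not the gem's confirmed implementation):

  require 'zlib'
  require 'stringio'

  # Assumed helper: gzip-compress the JSON body before it is posted.
  def compress_data(data)
    io = StringIO.new
    gz = Zlib::GzipWriter.new(io)
    gz.write(data)
    gz.close
    io.string
  end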

@@ -198,9 +210,12 @@ class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
       true
     else
       # Failures are logged to the global semantic logger failsafe logger (Usually stderr or file)
-
+      logger.error("Bad HTTP response from: #{url} code: #{response.code}, #{response.body}")
       false
     end
+  rescue RuntimeError => exc
+    reopen
+    raise exc
   end
 
 end

data/lib/semantic_logger/appender/kafka.rb

@@ -142,7 +142,7 @@ class SemanticLogger::Appender::Kafka < SemanticLogger::Subscriber
       ssl_ca_cert:         ssl_ca_cert,
       ssl_client_cert:     ssl_client_cert,
       ssl_client_cert_key: ssl_client_cert_key,
-      logger:
+      logger:              logger
     )
 
     @producer = @kafka.async_producer(

@@ -160,8 +160,6 @@ class SemanticLogger::Appender::Kafka < SemanticLogger::Subscriber
 
   # Forward log messages to Kafka producer thread.
   def log(log)
-    return false unless should_log?(log)
-
     json = formatter.call(log, self)
     @producer.produce(json, topic: topic, partition: partition, partition_key: partition_key, key: key)
   end

data/lib/semantic_logger/appender/mongodb.rb

@@ -106,7 +106,7 @@ module SemanticLogger
     def initialize(uri:, collection_name: 'semantic_logger', write_concern: 0, collection_size: 1024**3, collection_max: nil,
                    level: nil, formatter: nil, filter: nil, host: nil, application: nil, &block)
 
-      @client = Mongo::Client.new(uri, logger:
+      @client = Mongo::Client.new(uri, logger: logger)
       @collection_name = collection_name
       @options = {
         capped: true,

@@ -161,8 +161,6 @@ module SemanticLogger
 
     # Log the message to MongoDB
     def log(log)
-      return false unless should_log?(log)
-
       # Insert log entry into Mongo
       collection.insert_one(formatter.call(log, self))
       true
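
Given the initializer shown above, a minimal construction sketch (the URI is illustrative):

  appender = SemanticLogger::Appender::MongoDB.new(
    uri:             'mongodb://localhost:27017/production_logs', # illustrative URI
    collection_size: 1024**3                                      # 1GB capped collection, per the default above
  )

The internal Mongo::Client is now constructed with the appender's own logger.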

data/lib/semantic_logger/appender/new_relic.rb

@@ -29,7 +29,13 @@ class SemanticLogger::Appender::NewRelic < SemanticLogger::Subscriber
   #   regular expression. All other messages will be ignored.
   #   Proc: Only include log messages where the supplied Proc returns true
   #     The Proc must return true or false.
-  def initialize(level: :error,
+  def initialize(level: :error,
+                 formatter: nil,
+                 filter: nil,
+                 application: nil,
+                 host: nil,
+                 &block)
+
     super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
   end
 
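
The expanded signature keeps :error as the default level, so only errors are forwarded to New Relic. A minimal usage sketch (the instance form of SemanticLogger.add_appender is assumed):

  SemanticLogger.add_appender(
    appender: SemanticLogger::Appender::NewRelic.new(level: :error)
  )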

@@ -43,8 +49,6 @@ class SemanticLogger::Appender::NewRelic < SemanticLogger::Subscriber
 
   # Send an error notification to New Relic
   def log(log)
-    return false unless should_log?(log)
-
     # Send error messages as Runtime exceptions
     exception =
       if log.exception